Fix a bunch of non-final static fields (#119185)

Fixing almost all spots where `final` was missing on static fields; who knows, maybe we get a small
speedup from some constant folding here and there.
Armin Braun 2024-12-26 19:14:36 +01:00 committed by GitHub
parent 3c84517e75
commit e94f145350
140 changed files with 238 additions and 242 deletions
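
For context on the constant-folding remark in the commit message: `javac` inlines `static final` fields whose initializers are compile-time constants, and HotSpot can also treat already-initialized `static final` fields as constants while JIT-compiling. A minimal sketch of the difference, with made-up names rather than code from this PR:

public class FinalFoldingDemo {
    static int MUTABLE_LIMIT = 1024;        // plain static: every read is a real field load
    static final int CONSTANT_LIMIT = 1024; // compile-time constant: javac inlines 1024

    static boolean overMutable(int n) {
        return n > MUTABLE_LIMIT;           // the JIT cannot assume this field never changes
    }

    static boolean overConstant(int n) {
        return n > CONSTANT_LIMIT;          // effectively folds to n > 1024
    }

    public static void main(String[] args) {
        System.out.println(overMutable(2048) + " " + overConstant(2048)); // true true
    }
}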


@ -14,14 +14,14 @@ import org.elasticsearch.gradle.ElasticsearchDistributionType;
import java.util.List;
public class InternalElasticsearchDistributionTypes {
public static ElasticsearchDistributionType DEB = new DebElasticsearchDistributionType();
public static ElasticsearchDistributionType RPM = new RpmElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER = new DockerElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_IRONBANK = new DockerIronBankElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType();
public static final ElasticsearchDistributionType DEB = new DebElasticsearchDistributionType();
public static final ElasticsearchDistributionType RPM = new RpmElasticsearchDistributionType();
public static final ElasticsearchDistributionType DOCKER = new DockerElasticsearchDistributionType();
public static final ElasticsearchDistributionType DOCKER_IRONBANK = new DockerIronBankElasticsearchDistributionType();
public static final ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType();
public static final ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType();
public static List<ElasticsearchDistributionType> ALL_INTERNAL = List.of(
public static final List<ElasticsearchDistributionType> ALL_INTERNAL = List.of(
DEB,
RPM,
DOCKER,

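Worth noting for hunks like the one above: `List.of(...)` is already unmodifiable at runtime, so adding `final` closes the remaining gap, namely reassignment of the reference. A small sketch with hypothetical values:

import java.util.List;

public class DistributionTypesSketch {
    static final List<String> ALL = List.of("deb", "rpm"); // values are made up

    public static void main(String[] args) {
        System.out.println(ALL);
        // ALL = List.of();   // does not compile: cannot assign a final field
        // ALL.add("docker"); // compiles, but throws UnsupportedOperationException
    }
}
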

@ -30,7 +30,7 @@ public class TestRerunTaskExtension {
/**
* The name of the extension added to each test task.
*/
public static String NAME = "rerun";
public static final String NAME = "rerun";
private final Property<Integer> maxReruns;


@ -12,6 +12,6 @@ package org.elasticsearch.gradle.distribution;
import org.elasticsearch.gradle.ElasticsearchDistributionType;
public class ElasticsearchDistributionTypes {
public static ElasticsearchDistributionType ARCHIVE = new ArchiveElasticsearchDistributionType();
public static ElasticsearchDistributionType INTEG_TEST_ZIP = new IntegTestZipElasticsearchDistributionType();
public static final ElasticsearchDistributionType ARCHIVE = new ArchiveElasticsearchDistributionType();
public static final ElasticsearchDistributionType INTEG_TEST_ZIP = new IntegTestZipElasticsearchDistributionType();
}


@ -23,7 +23,7 @@ import java.util.Set;
public class PatternBank {
public static PatternBank EMPTY = new PatternBank(Map.of());
public static final PatternBank EMPTY = new PatternBank(Map.of());
private final Map<String, String> bank;


@ -29,7 +29,7 @@ final class Constants {
/**
* sqrt(3) / 2.0
*/
public static double M_SQRT3_2 = 0.8660254037844386467637231707529361834714;
public static final double M_SQRT3_2 = 0.8660254037844386467637231707529361834714;
/**
* 2.0 * PI
*/
@ -37,19 +37,19 @@ final class Constants {
/**
* The number of H3 base cells
*/
public static int NUM_BASE_CELLS = 122;
public static final int NUM_BASE_CELLS = 122;
/**
* The number of vertices in a hexagon
*/
public static int NUM_HEX_VERTS = 6;
public static final int NUM_HEX_VERTS = 6;
/**
* The number of vertices in a pentagon
*/
public static int NUM_PENT_VERTS = 5;
public static final int NUM_PENT_VERTS = 5;
/**
* H3 index modes
*/
public static int H3_CELL_MODE = 1;
public static final int H3_CELL_MODE = 1;
/**
* square root of 7
*/
@ -64,14 +64,14 @@ final class Constants {
* (or distance between adjacent cell center points
* on the plane) to gnomonic unit length.
*/
public static double RES0_U_GNOMONIC = 0.38196601125010500003;
public static final double RES0_U_GNOMONIC = 0.38196601125010500003;
/**
* rotation angle between Class II and Class III resolution axes
* (asin(sqrt(3.0 / 28.0)))
*/
public static double M_AP7_ROT_RADS = 0.333473172251832115336090755351601070065900389;
public static final double M_AP7_ROT_RADS = 0.333473172251832115336090755351601070065900389;
/**
* threshold epsilon
*/
public static double EPSILON = 0.0000000000000001;
public static final double EPSILON = 0.0000000000000001;
}
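
The javadoc gives closed forms for several of these constants; a throwaway check like the following (illustrative only, not part of the change) confirms the literals match their documented formulas when run with `java -ea`:

public class H3ConstantsCheck {
    public static void main(String[] args) {
        // sqrt(3) / 2.0, per the javadoc on M_SQRT3_2
        assert Math.abs(Math.sqrt(3.0) / 2.0 - 0.8660254037844386467637231707529361834714) < 1e-15;
        // asin(sqrt(3.0 / 28.0)), per the javadoc on M_AP7_ROT_RADS
        assert Math.abs(Math.asin(Math.sqrt(3.0 / 28.0)) - 0.333473172251832115336090755351601070065900389) < 1e-15;
        System.out.println("literals match their documented closed forms");
    }
}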


@ -33,7 +33,7 @@ public final class H3 {
/**
* max H3 resolution; H3 version 1 has 16 resolutions, numbered 0 through 15
*/
public static int MAX_H3_RES = 15;
public static final int MAX_H3_RES = 15;
private static final long[] NORTH = new long[MAX_H3_RES + 1];
private static final long[] SOUTH = new long[MAX_H3_RES + 1];


@ -41,22 +41,22 @@ final class H3Index {
return BaseCells.isBaseCellPentagon(H3Index.H3_get_base_cell(h3)) && H3Index.h3LeadingNonZeroDigit(h3) == 0;
}
public static long H3_INIT = 35184372088831L;
public static final long H3_INIT = 35184372088831L;
/**
* The bit offset of the mode in an H3 index.
*/
public static int H3_MODE_OFFSET = 59;
public static final int H3_MODE_OFFSET = 59;
/**
* 1's in the 4 mode bits, 0's everywhere else.
*/
public static long H3_MODE_MASK = 15L << H3_MODE_OFFSET;
public static final long H3_MODE_MASK = 15L << H3_MODE_OFFSET;
/**
* 0's in the 4 mode bits, 1's everywhere else.
*/
public static long H3_MODE_MASK_NEGATIVE = ~H3_MODE_MASK;
public static final long H3_MODE_MASK_NEGATIVE = ~H3_MODE_MASK;
public static long H3_set_mode(long h3, long mode) {
return (h3 & H3_MODE_MASK_NEGATIVE) | (mode << H3_MODE_OFFSET);
@ -65,16 +65,16 @@ final class H3Index {
/**
* The bit offset of the base cell in an H3 index.
*/
public static int H3_BC_OFFSET = 45;
public static final int H3_BC_OFFSET = 45;
/**
* 1's in the 7 base cell bits, 0's everywhere else.
*/
public static long H3_BC_MASK = 127L << H3_BC_OFFSET;
public static final long H3_BC_MASK = 127L << H3_BC_OFFSET;
/**
* 0's in the 7 base cell bits, 1's everywhere else.
*/
public static long H3_BC_MASK_NEGATIVE = ~H3_BC_MASK;
public static final long H3_BC_MASK_NEGATIVE = ~H3_BC_MASK;
/**
* Sets the integer base cell of h3 to bc.
@ -83,26 +83,26 @@ final class H3Index {
return (h3 & H3_BC_MASK_NEGATIVE) | (bc << H3_BC_OFFSET);
}
public static int H3_RES_OFFSET = 52;
public static final int H3_RES_OFFSET = 52;
/**
* 1's in the 4 resolution bits, 0's everywhere else.
*/
public static long H3_RES_MASK = 15L << H3_RES_OFFSET;
public static final long H3_RES_MASK = 15L << H3_RES_OFFSET;
/**
* 0's in the 4 resolution bits, 1's everywhere else.
*/
public static long H3_RES_MASK_NEGATIVE = ~H3_RES_MASK;
public static final long H3_RES_MASK_NEGATIVE = ~H3_RES_MASK;
/**
* The bit offset of the max resolution digit in an H3 index.
*/
public static int H3_MAX_OFFSET = 63;
public static final int H3_MAX_OFFSET = 63;
/**
* 1 in the highest bit, 0's everywhere else.
*/
public static long H3_HIGH_BIT_MASK = (1L << H3_MAX_OFFSET);
public static final long H3_HIGH_BIT_MASK = (1L << H3_MAX_OFFSET);
/**
* Gets the highest bit of the H3 index.
@ -121,12 +121,12 @@ final class H3Index {
/**
* The bit offset of the reserved bits in an H3 index.
*/
public static int H3_RESERVED_OFFSET = 56;
public static final int H3_RESERVED_OFFSET = 56;
/**
* 1's in the 3 reserved bits, 0's everywhere else.
*/
public static long H3_RESERVED_MASK = (7L << H3_RESERVED_OFFSET);
public static final long H3_RESERVED_MASK = (7L << H3_RESERVED_OFFSET);
/**
* Gets a value in the reserved space. Should always be zero for valid indexes.
@ -149,12 +149,12 @@ final class H3Index {
/**
* The number of bits in a single H3 resolution digit.
*/
public static int H3_PER_DIGIT_OFFSET = 3;
public static final int H3_PER_DIGIT_OFFSET = 3;
/**
* 1's in the 3 bits of res 15 digit bits, 0's everywhere else.
*/
public static long H3_DIGIT_MASK = 7L;
public static final long H3_DIGIT_MASK = 7L;
/**
* Gets the resolution res integer digit (0-7) of h3.

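Since this file is all mask/offset pairs, a self-contained round trip over the mode bits may help; the constants are copied from the diff, while `setMode`/`getMode` are illustrative names rather than the real API:

public class H3BitsDemo {
    static final int H3_MODE_OFFSET = 59;
    static final long H3_MODE_MASK = 15L << H3_MODE_OFFSET;
    static final long H3_MODE_MASK_NEGATIVE = ~H3_MODE_MASK;

    static long setMode(long h3, long mode) {
        // clear the 4 mode bits, then install the new mode at bit 59
        return (h3 & H3_MODE_MASK_NEGATIVE) | (mode << H3_MODE_OFFSET);
    }

    static long getMode(long h3) {
        // keep only the mode bits, then shift them down to the low end
        return (h3 & H3_MODE_MASK) >>> H3_MODE_OFFSET;
    }

    public static void main(String[] args) {
        long h3 = setMode(35184372088831L /* H3_INIT */, 1 /* H3_CELL_MODE */);
        System.out.println(getMode(h3)); // prints 1
    }
}
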

@ -20,8 +20,6 @@ import java.util.function.BiConsumer;
public class IngestDocumentBridge extends StableBridgeAPI.Proxy<IngestDocument> {
public static String INGEST_KEY = IngestDocument.INGEST_KEY;
public static IngestDocumentBridge wrap(final IngestDocument ingestDocument) {
if (ingestDocument == null) {
return null;


@ -111,7 +111,7 @@ public class MergingDigest extends AbstractTDigest {
// based on accumulated k-index. This can be much faster since the
// scale functions are more expensive than the corresponding
// weight limits.
public static boolean useWeightLimit = true;
public static final boolean useWeightLimit = true;
static MergingDigest create(TDigestArrays arrays, double compression) {
arrays.adjustBreaker(SHALLOW_SIZE);


@ -24,11 +24,11 @@ import org.elasticsearch.xcontent.ParseField;
*/
public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider<Analyzer> {
public static ParseField SEPARATOR = new ParseField("separator");
public static ParseField MAX_OUTPUT_SIZE = new ParseField("max_output_size");
public static final ParseField SEPARATOR = new ParseField("separator");
public static final ParseField MAX_OUTPUT_SIZE = new ParseField("max_output_size");
public static int DEFAULT_MAX_OUTPUT_SIZE = 255;
public static CharArraySet DEFAULT_STOP_WORDS = CharArraySet.EMPTY_SET;
public static final int DEFAULT_MAX_OUTPUT_SIZE = 255;
public static final CharArraySet DEFAULT_STOP_WORDS = CharArraySet.EMPTY_SET;
public static final char DEFAULT_SEPARATOR = ' ';
private final FingerprintAnalyzer analyzer;


@ -67,7 +67,7 @@ public abstract class DotPrefixValidator<RequestType> implements MappedActionFil
".ml-state",
".ml-anomalies-unrelated"
);
public static Setting<List<String>> IGNORED_INDEX_PATTERNS_SETTING = Setting.stringListSetting(
public static final Setting<List<String>> IGNORED_INDEX_PATTERNS_SETTING = Setting.stringListSetting(
"cluster.indices.validate_ignored_dot_patterns",
List.of(
"\\.ml-state-\\d+",


@ -27,7 +27,7 @@ import java.io.IOException;
import java.io.InputStream;
public class SearchTemplateResponse extends ActionResponse implements ToXContentObject {
public static ParseField TEMPLATE_OUTPUT_FIELD = new ParseField("template_output");
public static final ParseField TEMPLATE_OUTPUT_FIELD = new ParseField("template_output");
/** Contains the source of the rendered template **/
private BytesReference source;


@ -359,7 +359,7 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
}
@Deprecated
public static Mapper.TypeParser PARSER = (name, node, parserContext) -> {
public static final Mapper.TypeParser PARSER = (name, node, parserContext) -> {
boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(parserContext.getSettings());
boolean coerceByDefault = COERCE_SETTING.get(parserContext.getSettings());
FieldMapper.Builder builder = new LegacyGeoShapeFieldMapper.Builder(


@ -127,7 +127,7 @@ public class TokenCountFieldMapper extends FieldMapper {
}
}
public static TypeParser PARSER = new TypeParser((n, c) -> new Builder(n));
public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n));
private final boolean index;
private final boolean hasDocValues;


@ -37,7 +37,7 @@ public class AzureStorageService {
* The maximum size of a BlockBlob block.
* See https://docs.microsoft.com/en-us/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs
*/
public static ByteSizeValue MAX_BLOCK_SIZE = new ByteSizeValue(100, ByteSizeUnit.MB);
public static final ByteSizeValue MAX_BLOCK_SIZE = new ByteSizeValue(100, ByteSizeUnit.MB);
/**
* The maximum number of blocks.


@ -21,7 +21,7 @@ public record S3RepositoriesMetrics(
LongHistogram retryDeletesHistogram
) {
public static S3RepositoriesMetrics NOOP = new S3RepositoriesMetrics(RepositoriesMetrics.NOOP);
public static final S3RepositoriesMetrics NOOP = new S3RepositoriesMetrics(RepositoriesMetrics.NOOP);
public static final String METRIC_RETRY_EVENT_TOTAL = "es.repositories.s3.input_stream.retry.event.total";
public static final String METRIC_RETRY_SUCCESS_TOTAL = "es.repositories.s3.input_stream.retry.success.total";


@ -91,7 +91,7 @@ public class Netty4Plugin extends Plugin implements NetworkPlugin {
*/
private static final ByteSizeValue MTU = ByteSizeValue.ofBytes(Long.parseLong(System.getProperty("es.net.mtu", "1500")));
private static final String SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = "http.netty.max_composite_buffer_components";
public static Setting<Integer> SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = new Setting<>(
public static final Setting<Integer> SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = new Setting<>(
SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS,
(s) -> {
ByteSizeValue maxContentLength = SETTING_HTTP_MAX_CONTENT_LENGTH.get(s);


@ -167,7 +167,7 @@ public class AnnotatedTextFieldMapper extends FieldMapper {
}
}
public static TypeParser PARSER = new TypeParser(
public static final TypeParser PARSER = new TypeParser(
(n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), SourceFieldMapper.isSynthetic(c.getIndexSettings()))
);


@ -63,7 +63,7 @@ public class Murmur3FieldMapper extends FieldMapper {
}
}
public static TypeParser PARSER = new TypeParser((n, c) -> new Builder(n));
public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n));
// this only exists so a check can be done to match the field type to using murmur3 hashing...
public static class Murmur3FieldType extends MappedFieldType {


@ -61,7 +61,7 @@ public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotReque
public static final TransportVersion SETTINGS_IN_REQUEST_VERSION = TransportVersions.V_8_0_0;
public static int MAXIMUM_METADATA_BYTES = 1024; // chosen arbitrarily
public static final int MAXIMUM_METADATA_BYTES = 1024; // chosen arbitrarily
private String snapshot;


@ -22,7 +22,7 @@ import java.util.Map;
import java.util.Optional;
public class GetShardSnapshotResponse extends ActionResponse {
public static GetShardSnapshotResponse EMPTY = new GetShardSnapshotResponse(null, Collections.emptyMap());
public static final GetShardSnapshotResponse EMPTY = new GetShardSnapshotResponse(null, Collections.emptyMap());
private final ShardSnapshotInfo latestShardSnapshot;
private final Map<String, RepositoryException> repositoryFailures;


@ -54,7 +54,7 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
private boolean includeGlobalState = false;
private boolean partial = false;
private boolean includeAliases = true;
public static TransportVersion VERSION_SUPPORTING_QUIET_PARAMETER = TransportVersions.V_8_4_0;
public static final TransportVersion VERSION_SUPPORTING_QUIET_PARAMETER = TransportVersions.V_8_4_0;
private boolean quiet = false;
private Settings indexSettings = Settings.EMPTY;
private String[] ignoreIndexSettings = Strings.EMPTY_ARRAY;


@ -612,7 +612,7 @@ public abstract class TransportBroadcastByNodeAction<
* which there is no shard-level return value.
*/
public static final class EmptyResult implements Writeable {
public static EmptyResult INSTANCE = new EmptyResult();
public static final EmptyResult INSTANCE = new EmptyResult();
private EmptyResult() {}


@ -36,7 +36,7 @@ public record ClusterSnapshotStats(
List<PerRepositoryStats> statsByRepository
) implements ToXContentObject, Writeable {
public static ClusterSnapshotStats EMPTY = new ClusterSnapshotStats(0, 0, 0, 0, List.of());
public static final ClusterSnapshotStats EMPTY = new ClusterSnapshotStats(0, 0, 0, 0, List.of());
public static ClusterSnapshotStats of(ClusterState clusterState, long currentTimeMillis) {
return of(


@ -427,7 +427,7 @@ public class ComposableIndexTemplate implements SimpleDiffable<ComposableIndexTe
/**
* A mapping snippet for a backing index with `_data_stream_timestamp` meta field mapper properly configured.
*/
public static CompressedXContent DATA_STREAM_MAPPING_SNIPPET;
public static final CompressedXContent DATA_STREAM_MAPPING_SNIPPET;
static {
try {


@ -82,7 +82,7 @@ public final class DataStream implements SimpleDiffable<DataStream>, ToXContentO
public static final DateFormatter DATE_FORMATTER = DateFormatter.forPattern("uuuu.MM.dd");
public static final String TIMESTAMP_FIELD_NAME = "@timestamp";
// Timeseries indices' leaf readers should be sorted by desc order of their timestamp field, as it allows search time optimizations
public static Comparator<LeafReader> TIMESERIES_LEAF_READERS_SORTER = Comparator.comparingLong((LeafReader r) -> {
public static final Comparator<LeafReader> TIMESERIES_LEAF_READERS_SORTER = Comparator.comparingLong((LeafReader r) -> {
try {
PointValues points = r.getPointValues(TIMESTAMP_FIELD_NAME);
if (points != null) {


@ -116,25 +116,25 @@ public class Metadata implements Iterable<IndexMetadata>, Diffable<Metadata>, Ch
/**
* Indicates that this custom metadata will be returned as part of an API call but will not be persisted
*/
public static EnumSet<XContentContext> API_ONLY = EnumSet.of(XContentContext.API);
public static final EnumSet<XContentContext> API_ONLY = EnumSet.of(XContentContext.API);
/**
* Indicates that this custom metadata will be returned as part of an API call and will be persisted between
* node restarts, but will not be a part of a snapshot global state
*/
public static EnumSet<XContentContext> API_AND_GATEWAY = EnumSet.of(XContentContext.API, XContentContext.GATEWAY);
public static final EnumSet<XContentContext> API_AND_GATEWAY = EnumSet.of(XContentContext.API, XContentContext.GATEWAY);
/**
* Indicates that this custom metadata will be returned as part of an API call and stored as a part of
* a snapshot global state, but will not be persisted between node restarts
*/
public static EnumSet<XContentContext> API_AND_SNAPSHOT = EnumSet.of(XContentContext.API, XContentContext.SNAPSHOT);
public static final EnumSet<XContentContext> API_AND_SNAPSHOT = EnumSet.of(XContentContext.API, XContentContext.SNAPSHOT);
/**
* Indicates that this custom metadata will be returned as part of an API call, stored as a part of
* a snapshot global state, and will be persisted between node restarts
*/
public static EnumSet<XContentContext> ALL_CONTEXTS = EnumSet.allOf(XContentContext.class);
public static final EnumSet<XContentContext> ALL_CONTEXTS = EnumSet.allOf(XContentContext.class);
/**
* Custom metadata that persists (via XContent) across restarts. The deserialization method for each implementation must be registered


@ -153,7 +153,7 @@ public class DiscoveryNodeRole implements Comparable<DiscoveryNodeRole> {
/**
* Represents the role for a content node.
*/
public static DiscoveryNodeRole DATA_CONTENT_NODE_ROLE = new DiscoveryNodeRole("data_content", "s", true) {
public static final DiscoveryNodeRole DATA_CONTENT_NODE_ROLE = new DiscoveryNodeRole("data_content", "s", true) {
@Override
public boolean isEnabledByDefault(final Settings settings) {
@ -164,7 +164,7 @@ public class DiscoveryNodeRole implements Comparable<DiscoveryNodeRole> {
/**
* Represents the role for a hot node.
*/
public static DiscoveryNodeRole DATA_HOT_NODE_ROLE = new DiscoveryNodeRole("data_hot", "h", true) {
public static final DiscoveryNodeRole DATA_HOT_NODE_ROLE = new DiscoveryNodeRole("data_hot", "h", true) {
@Override
public boolean isEnabledByDefault(final Settings settings) {
@ -175,7 +175,7 @@ public class DiscoveryNodeRole implements Comparable<DiscoveryNodeRole> {
/**
* Represents the role for a warm node.
*/
public static DiscoveryNodeRole DATA_WARM_NODE_ROLE = new DiscoveryNodeRole("data_warm", "w", true) {
public static final DiscoveryNodeRole DATA_WARM_NODE_ROLE = new DiscoveryNodeRole("data_warm", "w", true) {
@Override
public boolean isEnabledByDefault(final Settings settings) {
@ -186,7 +186,7 @@ public class DiscoveryNodeRole implements Comparable<DiscoveryNodeRole> {
/**
* Represents the role for a cold node.
*/
public static DiscoveryNodeRole DATA_COLD_NODE_ROLE = new DiscoveryNodeRole("data_cold", "c", true) {
public static final DiscoveryNodeRole DATA_COLD_NODE_ROLE = new DiscoveryNodeRole("data_cold", "c", true) {
@Override
public boolean isEnabledByDefault(final Settings settings) {
@ -197,7 +197,7 @@ public class DiscoveryNodeRole implements Comparable<DiscoveryNodeRole> {
/**
* Represents the role for a frozen node.
*/
public static DiscoveryNodeRole DATA_FROZEN_NODE_ROLE = new DiscoveryNodeRole("data_frozen", "f", true) {
public static final DiscoveryNodeRole DATA_FROZEN_NODE_ROLE = new DiscoveryNodeRole("data_frozen", "f", true) {
@Override
public boolean isEnabledByDefault(final Settings settings) {


@ -39,7 +39,7 @@ public record ClusterBalanceStats(
Map<String, NodeBalanceStats> nodes
) implements Writeable, ToXContentObject {
public static ClusterBalanceStats EMPTY = new ClusterBalanceStats(0, 0, Map.of(), Map.of());
public static final ClusterBalanceStats EMPTY = new ClusterBalanceStats(0, 0, Map.of(), Map.of());
public static ClusterBalanceStats createFrom(
ClusterState clusterState,


@ -140,7 +140,7 @@ public class ClusterStateUpdateStats implements Writeable, ToXContentFragment {
out.writeVLong(failedNotificationElapsedMillis);
}
public static ClusterStateUpdateStats EMPTY = new ClusterStateUpdateStats(
public static final ClusterStateUpdateStats EMPTY = new ClusterStateUpdateStats(
0L,
0L,
0L,


@ -39,7 +39,7 @@ public class DeprecationLogger {
* Deprecation messages are logged at this level.
* More serious than WARN by 1, but less serious than ERROR
*/
public static Level CRITICAL = Level.forName("CRITICAL", Level.WARN.intLevel() - 1);
public static final Level CRITICAL = Level.forName("CRITICAL", Level.WARN.intLevel() - 1);
private static volatile List<String> skipTheseDeprecations = Collections.emptyList();
private final Logger logger;


@ -27,7 +27,7 @@ public final class SettingsFilter {
/**
* Can be used to specify settings filter that will be used to filter out matching settings in toXContent method
*/
public static String SETTINGS_FILTER_PARAM = "settings_filter";
public static final String SETTINGS_FILTER_PARAM = "settings_filter";
private final Set<String> patterns;
private final String patternString;


@ -510,14 +510,14 @@ public class EsExecutors {
public static class TaskTrackingConfig {
// This is a random starting point alpha. TODO: revisit this with actual testing and/or make it configurable
public static double DEFAULT_EWMA_ALPHA = 0.3;
public static final double DEFAULT_EWMA_ALPHA = 0.3;
private final boolean trackExecutionTime;
private final boolean trackOngoingTasks;
private final double ewmaAlpha;
public static TaskTrackingConfig DO_NOT_TRACK = new TaskTrackingConfig(false, false, DEFAULT_EWMA_ALPHA);
public static TaskTrackingConfig DEFAULT = new TaskTrackingConfig(true, false, DEFAULT_EWMA_ALPHA);
public static final TaskTrackingConfig DO_NOT_TRACK = new TaskTrackingConfig(false, false, DEFAULT_EWMA_ALPHA);
public static final TaskTrackingConfig DEFAULT = new TaskTrackingConfig(true, false, DEFAULT_EWMA_ALPHA);
public TaskTrackingConfig(boolean trackOngoingTasks, double ewmaAlpha) {
this(true, trackOngoingTasks, ewmaAlpha);

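`DEFAULT_EWMA_ALPHA` parameterizes a standard exponentially weighted moving average. As a refresher, a sketch of the update rule (not the actual tracking code):

public class EwmaSketch {
    static double ewma(double previousAverage, double newSample, double alpha) {
        // alpha = 0.3 means a new sample contributes 30% of the updated average
        return alpha * newSample + (1 - alpha) * previousAverage;
    }

    public static void main(String[] args) {
        System.out.println(ewma(100.0, 200.0, 0.3)); // ~130.0
    }
}
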

@ -30,7 +30,7 @@ import java.util.Objects;
*/
final class DefaultBuildVersion extends BuildVersion {
public static BuildVersion CURRENT = new DefaultBuildVersion(Version.CURRENT.id());
public static final BuildVersion CURRENT = new DefaultBuildVersion(Version.CURRENT.id());
final Version version;


@ -30,7 +30,7 @@ public class Index implements Writeable, ToXContentObject {
public static final Index[] EMPTY_ARRAY = new Index[0];
public static Comparator<Index> COMPARE_BY_NAME = Comparator.comparing(Index::getName);
public static final Comparator<Index> COMPARE_BY_NAME = Comparator.comparing(Index::getName);
private static final String INDEX_UUID_KEY = "index_uuid";
private static final String INDEX_NAME_KEY = "index_name";


@ -228,7 +228,7 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<GeoPoi
}
public static TypeParser PARSER = createTypeParserWithLegacySupport(
public static final TypeParser PARSER = createTypeParserWithLegacySupport(
(n, c) -> new Builder(
n,
c.scriptCompiler(),


@ -104,7 +104,7 @@ public final class KeywordFieldMapper extends FieldMapper {
FIELD_TYPE = freezeAndDeduplicateFieldType(ft);
}
public static TextSearchInfo TEXT_SEARCH_INFO = new TextSearchInfo(
public static final TextSearchInfo TEXT_SEARCH_INFO = new TextSearchInfo(
FIELD_TYPE,
null,
Lucene.KEYWORD_ANALYZER,


@ -14,5 +14,5 @@ package org.elasticsearch.index.mapper;
* Main purpose of this class is to avoid verbosity of passing individual metric instances around.
*/
public record MapperMetrics(SourceFieldMetrics sourceFieldMetrics) {
public static MapperMetrics NOOP = new MapperMetrics(SourceFieldMetrics.NOOP);
public static final MapperMetrics NOOP = new MapperMetrics(SourceFieldMetrics.NOOP);
}


@ -49,7 +49,7 @@ public class TimeSeriesRoutingHashFieldMapper extends MetadataFieldMapper {
public static final TypeParser PARSER = new FixedTypeParser(c -> c.getIndexSettings().getMode().timeSeriesRoutingHashFieldMapper());
static final NodeFeature TS_ROUTING_HASH_FIELD_PARSES_BYTES_REF = new NodeFeature("tsdb.ts_routing_hash_doc_value_parse_byte_ref");
public static DocValueFormat TS_ROUTING_HASH_DOC_VALUE_FORMAT = TimeSeriesRoutingHashFieldType.DOC_VALUE_FORMAT;
public static final DocValueFormat TS_ROUTING_HASH_DOC_VALUE_FORMAT = TimeSeriesRoutingHashFieldType.DOC_VALUE_FORMAT;
static final class TimeSeriesRoutingHashFieldType extends MappedFieldType {


@ -119,10 +119,11 @@ public class DenseVectorFieldMapper extends FieldMapper {
public static final IndexVersion LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION = IndexVersions.V_8_9_0;
public static final String CONTENT_TYPE = "dense_vector";
public static short MAX_DIMS_COUNT = 4096; // maximum allowed number of dimensions
public static int MAX_DIMS_COUNT_BIT = 4096 * Byte.SIZE; // maximum allowed number of dimensions
public static final short MAX_DIMS_COUNT = 4096; // maximum allowed number of dimensions
public static final int MAX_DIMS_COUNT_BIT = 4096 * Byte.SIZE; // maximum allowed number of dimensions
public static short MIN_DIMS_FOR_DYNAMIC_FLOAT_MAPPING = 128; // minimum number of dims for floats to be dynamically mapped to vector
public static final short MIN_DIMS_FOR_DYNAMIC_FLOAT_MAPPING = 128; // minimum number of dims for floats to be dynamically mapped to
// vector
public static final int MAGNITUDE_BYTES = 4;
public static final int NUM_CANDS_OVERSAMPLE_LIMIT = 10_000; // Max oversample allowed for k and num_candidates


@ -41,9 +41,9 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder<SpanNearQueryBuil
public static final String NAME = "span_near";
/** Default for flag controlling whether matches are required to be in-order */
public static boolean DEFAULT_IN_ORDER = true;
public static final boolean DEFAULT_IN_ORDER = true;
/** Default slop value, this is the same that lucene {@link SpanNearQuery} uses if no slop is provided */
public static int DEFAULT_SLOP = 0;
public static final int DEFAULT_SLOP = 0;
private static final ParseField SLOP_FIELD = new ParseField("slop");
private static final ParseField CLAUSES_FIELD = new ParseField("clauses");


@ -353,7 +353,7 @@ public class BulkByScrollTask extends CancellableTask {
public static final String THROTTLED_UNTIL_HR_FIELD = "throttled_until";
public static final String SLICES_FIELD = "slices";
public static Set<String> FIELDS_SET = new HashSet<>();
public static final Set<String> FIELDS_SET = new HashSet<>();
static {
FIELDS_SET.add(SLICE_ID_FIELD);
FIELDS_SET.add(TOTAL_FIELD);
@ -774,7 +774,7 @@ public class BulkByScrollTask extends CancellableTask {
private final Status status;
private final Exception exception;
public static Set<String> EXPECTED_EXCEPTION_FIELDS = new HashSet<>();
public static final Set<String> EXPECTED_EXCEPTION_FIELDS = new HashSet<>();
static {
EXPECTED_EXCEPTION_FIELDS.add("type");
EXPECTED_EXCEPTION_FIELDS.add("reason");

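A caveat that applies to hunks like this one: `final` freezes the reference, not the contents, which is exactly why the static-initializer population above still compiles. A sketch with hypothetical names:

import java.util.HashSet;
import java.util.Set;

public class FieldsHolder {
    public static final Set<String> FIELDS = new HashSet<>();
    static {
        FIELDS.add("slice_id");      // fine: mutates contents, does not reassign
        // FIELDS = new HashSet<>(); // does not compile once the field is final
    }

    public static void main(String[] args) {
        System.out.println(FIELDS); // prints [slice_id]
    }
}
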

@ -39,8 +39,8 @@ public class GlobalCheckpointSyncAction extends TransportReplicationAction<
GlobalCheckpointSyncAction.Request,
ReplicationResponse> {
public static String ACTION_NAME = "indices:admin/seq_no/global_checkpoint_sync";
public static ActionType<ReplicationResponse> TYPE = new ActionType<>(ACTION_NAME);
public static final String ACTION_NAME = "indices:admin/seq_no/global_checkpoint_sync";
public static final ActionType<ReplicationResponse> TYPE = new ActionType<>(ACTION_NAME);
@Inject
public GlobalCheckpointSyncAction(


@ -118,7 +118,7 @@ public class RetentionLeases implements ToXContentFragment, Writeable {
* Represents an empty, un-versioned retention lease collection. This is used when no retention lease collection is found in the
* commit point
*/
public static RetentionLeases EMPTY = new RetentionLeases(1, 0, Collections.emptyList());
public static final RetentionLeases EMPTY = new RetentionLeases(1, 0, Collections.emptyList());
/**
* Constructs a new retention lease collection with the specified version and underlying collection of retention leases.


@ -971,7 +971,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
public record Location(long generation, long translogLocation, int size) implements Comparable<Location> {
public static Location EMPTY = new Location(0, 0, 0);
public static final Location EMPTY = new Location(0, 0, 0);
@Override
public String toString() {


@ -20,7 +20,7 @@ public record ExecutorNames(String threadPoolForGet, String threadPoolForSearch,
/**
* The thread pools for a typical system index.
*/
public static ExecutorNames DEFAULT_SYSTEM_INDEX_THREAD_POOLS = new ExecutorNames(
public static final ExecutorNames DEFAULT_SYSTEM_INDEX_THREAD_POOLS = new ExecutorNames(
ThreadPool.Names.SYSTEM_READ,
ThreadPool.Names.SYSTEM_READ,
ThreadPool.Names.SYSTEM_WRITE
@ -29,7 +29,7 @@ public record ExecutorNames(String threadPoolForGet, String threadPoolForSearch,
/**
* The thread pools for a typical system data stream. These are also the usual thread pools for non-system indices and data streams.
*/
public static ExecutorNames DEFAULT_SYSTEM_DATA_STREAM_THREAD_POOLS = new ExecutorNames(
public static final ExecutorNames DEFAULT_SYSTEM_DATA_STREAM_THREAD_POOLS = new ExecutorNames(
ThreadPool.Names.GET,
ThreadPool.Names.SEARCH,
ThreadPool.Names.WRITE
@ -38,7 +38,7 @@ public record ExecutorNames(String threadPoolForGet, String threadPoolForSearch,
/**
* The thread pools that should be used for critical system index operations.
*/
public static ExecutorNames CRITICAL_SYSTEM_INDEX_THREAD_POOLS = new ExecutorNames(
public static final ExecutorNames CRITICAL_SYSTEM_INDEX_THREAD_POOLS = new ExecutorNames(
ThreadPool.Names.SYSTEM_CRITICAL_READ,
ThreadPool.Names.SYSTEM_CRITICAL_READ,
ThreadPool.Names.SYSTEM_CRITICAL_WRITE


@ -24,7 +24,7 @@ import java.util.Map;
public record EmptyTaskSettings() implements TaskSettings {
public static final String NAME = "empty_task_settings";
public static EmptyTaskSettings INSTANCE = new EmptyTaskSettings();
public static final EmptyTaskSettings INSTANCE = new EmptyTaskSettings();
public EmptyTaskSettings(StreamInput in) {
this();


@ -31,7 +31,7 @@ public enum TaskType implements Writeable {
}
};
public static String NAME = "task_type";
public static final String NAME = "task_type";
public static TaskType fromString(String name) {
return valueOf(name.trim().toUpperCase(Locale.ROOT));


@ -33,7 +33,7 @@ public record RepositoriesMetrics(
LongHistogram httpRequestTimeInMillisHistogram
) {
public static RepositoriesMetrics NOOP = new RepositoriesMetrics(MeterRegistry.NOOP);
public static final RepositoriesMetrics NOOP = new RepositoriesMetrics(MeterRegistry.NOOP);
/**
* Is incremented for each request sent to the blob store (including retries)


@ -955,7 +955,7 @@ public class RepositoriesService extends AbstractLifecycleComponent implements C
return preRestoreChecks;
}
public static String COUNT_USAGE_STATS_NAME = "count";
public static final String COUNT_USAGE_STATS_NAME = "count";
public RepositoryUsageStats getUsageStats() {
if (repositories.isEmpty()) {


@ -1153,7 +1153,7 @@ public final class RepositoryData {
*/
public static class SnapshotDetails {
public static SnapshotDetails EMPTY = new SnapshotDetails(null, null, -1, -1, null);
public static final SnapshotDetails EMPTY = new SnapshotDetails(null, null, -1, -1, null);
@Nullable // TODO forbid nulls here, this only applies to very old repositories
private final SnapshotState snapshotState;


@ -26,5 +26,5 @@ public class CreateIndexCapabilities {
*/
private static final String LOOKUP_INDEX_MODE_CAPABILITY = "lookup_index_mode";
public static Set<String> CAPABILITIES = Set.of(LOGSDB_INDEX_MODE_CAPABILITY, LOOKUP_INDEX_MODE_CAPABILITY);
public static final Set<String> CAPABILITIES = Set.of(LOGSDB_INDEX_MODE_CAPABILITY, LOOKUP_INDEX_MODE_CAPABILITY);
}


@ -51,7 +51,7 @@ public class Metadata {
protected static final String IF_PRIMARY_TERM = "_if_primary_term";
protected static final String DYNAMIC_TEMPLATES = "_dynamic_templates";
public static FieldProperty<Object> ObjectField = new FieldProperty<>(Object.class);
public static final FieldProperty<Object> ObjectField = new FieldProperty<>(Object.class);
public static FieldProperty<String> StringField = new FieldProperty<>(String.class);
public static FieldProperty<Number> LongField = new FieldProperty<>(Number.class).withValidation(FieldProperty.LONGABLE_NUMBER);
@ -335,7 +335,7 @@ public class Metadata {
return new FieldProperty<>(type, nullable, writable, extendedValidation);
}
public static BiConsumer<String, Number> LONGABLE_NUMBER = (k, v) -> {
public static final BiConsumer<String, Number> LONGABLE_NUMBER = (k, v) -> {
long version = v.longValue();
// did we round?
if (v.doubleValue() == version) {
@ -346,7 +346,7 @@ public class Metadata {
);
};
public static FieldProperty<?> ALLOW_ALL = new FieldProperty<>(null, true, true, null);
public static final FieldProperty<?> ALLOW_ALL = new FieldProperty<>(null, true, true, null);
@SuppressWarnings("fallthrough")
public void check(MapOperation op, String key, Object value) {

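The `LONGABLE_NUMBER` validator above accepts a `Number` only when converting it to `long` loses nothing. Restated standalone, with a hypothetical class and method name:

public class LongableCheck {
    static boolean isLongable(Number v) {
        long asLong = v.longValue();
        // 2.0 passes (2.0 == 2L); 2.5 fails because longValue() would have rounded
        return v.doubleValue() == asLong;
    }

    public static void main(String[] args) {
        System.out.println(isLongable(2.0) + " " + isLongable(2.5)); // true false
    }
}
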

@ -170,8 +170,8 @@ public class ScriptedMetricAggContexts {
CombineScript newInstance(Map<String, Object> params, Map<String, Object> state);
}
public static String[] PARAMETERS = {};
public static ScriptContext<Factory> CONTEXT = new ScriptContext<>("aggs_combine", Factory.class);
public static final String[] PARAMETERS = {};
public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("aggs_combine", Factory.class);
}
public abstract static class ReduceScript {
@ -198,6 +198,6 @@ public class ScriptedMetricAggContexts {
}
public static String[] PARAMETERS = {};
public static ScriptContext<Factory> CONTEXT = new ScriptContext<>("aggs_reduce", Factory.class);
public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("aggs_reduce", Factory.class);
}
}


@ -451,8 +451,8 @@ public class NetworkDisruption implements ServiceDisruptionScheme {
*/
public static class NetworkDelay extends NetworkLinkDisruptionType {
public static TimeValue DEFAULT_DELAY_MIN = TimeValue.timeValueSeconds(10);
public static TimeValue DEFAULT_DELAY_MAX = TimeValue.timeValueSeconds(90);
public static final TimeValue DEFAULT_DELAY_MIN = TimeValue.timeValueSeconds(10);
public static final TimeValue DEFAULT_DELAY_MAX = TimeValue.timeValueSeconds(90);
private final TimeValue delay;


@ -96,7 +96,7 @@ public class BlobCacheMetrics {
this.cachePopulationTime = cachePopulationTime;
}
public static BlobCacheMetrics NOOP = new BlobCacheMetrics(TelemetryProvider.NOOP.getMeterRegistry());
public static final BlobCacheMetrics NOOP = new BlobCacheMetrics(TelemetryProvider.NOOP.getMeterRegistry());
public LongCounter getCacheMissCounter() {
return cacheMissCounter;


@ -44,7 +44,7 @@ public class SharedBytes extends AbstractRefCounted {
);
private static final Logger logger = LogManager.getLogger(SharedBytes.class);
public static int PAGE_SIZE = 4096;
public static final int PAGE_SIZE = 4096;
private static final String CACHE_FILE_NAME = "shared_snapshot_cache";


@ -34,8 +34,6 @@ import java.util.stream.Collectors;
*/
public final class ExpandedIdsMatcher {
public static String ALL = "_all";
/**
* Split {@code expression} into tokens separated by a ','
*


@ -46,7 +46,6 @@ public class GetJobModelSnapshotsUpgradeStatsAction extends ActionType<GetJobMod
// Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("model_snapshot_upgrades");
public static String TYPE = "model_snapshot_upgrade";
private GetJobModelSnapshotsUpgradeStatsAction() {
super(NAME);


@ -102,7 +102,7 @@ public class DatafeedConfig implements SimpleDiffable<DatafeedConfig>, ToXConten
// Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("datafeeds");
public static String TYPE = "datafeed";
public static final String TYPE = "datafeed";
/**
* The field name used to specify document counts in Elasticsearch


@ -42,9 +42,9 @@ public class AllocationStatus implements Writeable, ToXContentObject {
}
}
public static ParseField ALLOCATION_COUNT = new ParseField("allocation_count");
public static ParseField TARGET_ALLOCATION_COUNT = new ParseField("target_allocation_count");
public static ParseField STATE = new ParseField("state");
public static final ParseField ALLOCATION_COUNT = new ParseField("allocation_count");
public static final ParseField TARGET_ALLOCATION_COUNT = new ParseField("target_allocation_count");
public static final ParseField STATE = new ParseField("state");
private static final ConstructingObjectParser<AllocationStatus, Void> PARSER = new ConstructingObjectParser<>(
"allocation_health",


@ -13,8 +13,8 @@ import java.io.IOException;
public abstract class ChunkedNlpInferenceResults extends NlpInferenceResults {
public static String TEXT = "text";
public static String INFERENCE = "inference";
public static final String TEXT = "text";
public static final String INFERENCE = "inference";
ChunkedNlpInferenceResults(boolean isTruncated) {
super(isTruncated);


@ -22,7 +22,7 @@ import java.util.Objects;
import java.util.stream.Collectors;
public class ClassificationInferenceResults extends SingleValueInferenceResults {
public static String PREDICTION_PROBABILITY = "prediction_probability";
public static final String PREDICTION_PROBABILITY = "prediction_probability";
public static final String NAME = "classification";


@ -21,7 +21,7 @@ public class BertJapaneseTokenizationUpdate extends AbstractTokenizationUpdate {
public static final ParseField NAME = BertJapaneseTokenization.NAME;
public static ConstructingObjectParser<BertJapaneseTokenizationUpdate, Void> PARSER = new ConstructingObjectParser<>(
public static final ConstructingObjectParser<BertJapaneseTokenizationUpdate, Void> PARSER = new ConstructingObjectParser<>(
"bert_japanese_tokenization_update",
a -> new BertJapaneseTokenizationUpdate(a[0] == null ? null : Tokenization.Truncate.fromString((String) a[0]), (Integer) a[1])
);


@ -21,7 +21,7 @@ public class BertTokenizationUpdate extends AbstractTokenizationUpdate {
public static final ParseField NAME = BertTokenization.NAME;
public static ConstructingObjectParser<BertTokenizationUpdate, Void> PARSER = new ConstructingObjectParser<>(
public static final ConstructingObjectParser<BertTokenizationUpdate, Void> PARSER = new ConstructingObjectParser<>(
"bert_tokenization_update",
a -> new BertTokenizationUpdate(a[0] == null ? null : Tokenization.Truncate.fromString((String) a[0]), (Integer) a[1])
);


@ -29,7 +29,7 @@ public class ClassificationConfig implements LenientlyParsedInferenceConfig, Str
public static final ParseField PREDICTION_FIELD_TYPE = new ParseField("prediction_field_type");
private static final MlConfigVersion MIN_SUPPORTED_VERSION = MlConfigVersion.V_7_6_0;
public static ClassificationConfig EMPTY_PARAMS = new ClassificationConfig(
public static final ClassificationConfig EMPTY_PARAMS = new ClassificationConfig(
0,
DEFAULT_RESULTS_FIELD,
DEFAULT_TOP_CLASSES_RESULTS_FIELD,


@ -32,7 +32,7 @@ public class ClassificationConfigUpdate implements InferenceConfigUpdate, NamedX
public static final ParseField NAME = ClassificationConfig.NAME;
public static ClassificationConfigUpdate EMPTY_PARAMS = new ClassificationConfigUpdate(null, null, null, null, null);
public static final ClassificationConfigUpdate EMPTY_PARAMS = new ClassificationConfigUpdate(null, null, null, null, null);
private final Integer numTopClasses;
private final String topClassesResultsField;


@ -20,7 +20,7 @@ import java.util.Optional;
public class DebertaV2TokenizationUpdate extends AbstractTokenizationUpdate {
public static final ParseField NAME = new ParseField(DebertaV2Tokenization.NAME);
public static ConstructingObjectParser<DebertaV2TokenizationUpdate, Void> PARSER = new ConstructingObjectParser<>(
public static final ConstructingObjectParser<DebertaV2TokenizationUpdate, Void> PARSER = new ConstructingObjectParser<>(
"deberta_v2_tokenization_update",
a -> new DebertaV2TokenizationUpdate(a[0] == null ? null : Tokenization.Truncate.fromString((String) a[0]), (Integer) a[1])
);


@ -45,7 +45,7 @@ public class LearningToRankConfig extends RegressionConfig implements Rewriteabl
public static final ParseField FEATURE_EXTRACTORS = new ParseField("feature_extractors");
public static final ParseField DEFAULT_PARAMS = new ParseField("default_params");
public static LearningToRankConfig EMPTY_PARAMS = new LearningToRankConfig(null, null, null);
public static final LearningToRankConfig EMPTY_PARAMS = new LearningToRankConfig(null, null, null);
private static final ObjectParser<LearningToRankConfig.Builder, Boolean> LENIENT_PARSER = createParser(true);
private static final ObjectParser<LearningToRankConfig.Builder, Boolean> STRICT_PARSER = createParser(false);


@ -25,7 +25,7 @@ public class RegressionConfig implements LenientlyParsedInferenceConfig, Strictl
private static final MlConfigVersion MIN_SUPPORTED_VERSION = MlConfigVersion.V_7_6_0;
public static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values");
public static RegressionConfig EMPTY_PARAMS = new RegressionConfig(DEFAULT_RESULTS_FIELD, null);
public static final RegressionConfig EMPTY_PARAMS = new RegressionConfig(DEFAULT_RESULTS_FIELD, null);
private static final ObjectParser<RegressionConfig.Builder, Void> LENIENT_PARSER = createParser(true);
private static final ObjectParser<RegressionConfig.Builder, Void> STRICT_PARSER = createParser(false);


@ -29,7 +29,7 @@ public class RegressionConfigUpdate implements InferenceConfigUpdate, NamedXCont
public static final ParseField NAME = RegressionConfig.NAME;
public static RegressionConfigUpdate EMPTY_PARAMS = new RegressionConfigUpdate(null, null);
public static final RegressionConfigUpdate EMPTY_PARAMS = new RegressionConfigUpdate(null, null);
public static RegressionConfigUpdate fromMap(Map<String, Object> map) {
Map<String, Object> options = new HashMap<>(map);


@ -20,7 +20,7 @@ import java.util.Optional;
public class RobertaTokenizationUpdate extends AbstractTokenizationUpdate {
public static final ParseField NAME = new ParseField(RobertaTokenization.NAME);
public static ConstructingObjectParser<RobertaTokenizationUpdate, Void> PARSER = new ConstructingObjectParser<>(
public static final ConstructingObjectParser<RobertaTokenizationUpdate, Void> PARSER = new ConstructingObjectParser<>(
"roberta_tokenization_update",
a -> new RobertaTokenizationUpdate(a[0] == null ? null : Tokenization.Truncate.fromString((String) a[0]), (Integer) a[1])
);


@ -29,7 +29,7 @@ public class TextEmbeddingConfig implements NlpConfig {
public static final String NAME = "text_embedding";
public static ParseField EMBEDDING_SIZE = new ParseField("embedding_size");
public static final ParseField EMBEDDING_SIZE = new ParseField("embedding_size");
public static TextEmbeddingConfig fromXContentStrict(XContentParser parser) {
return STRICT_PARSER.apply(parser, null);


@ -29,7 +29,7 @@ public class TextEmbeddingConfigUpdate extends NlpConfigUpdate implements NamedX
public static final String NAME = TextEmbeddingConfig.NAME;
public static TextEmbeddingConfigUpdate EMPTY_INSTANCE = new TextEmbeddingConfigUpdate(null, null);
public static final TextEmbeddingConfigUpdate EMPTY_INSTANCE = new TextEmbeddingConfigUpdate(null, null);
public static TextEmbeddingConfigUpdate fromMap(Map<String, Object> map) {
Map<String, Object> options = new HashMap<>(map);


@ -20,7 +20,7 @@ import java.util.Optional;
public class XLMRobertaTokenizationUpdate extends AbstractTokenizationUpdate {
public static final ParseField NAME = new ParseField(XLMRobertaTokenization.NAME);
public static ConstructingObjectParser<XLMRobertaTokenizationUpdate, Void> PARSER = new ConstructingObjectParser<>(
public static final ConstructingObjectParser<XLMRobertaTokenizationUpdate, Void> PARSER = new ConstructingObjectParser<>(
"xlm_roberta_tokenization_update",
a -> new XLMRobertaTokenizationUpdate(a[0] == null ? null : Tokenization.Truncate.fromString((String) a[0]), (Integer) a[1])
);


@ -34,7 +34,7 @@ public record QueryExtractorBuilder(String featureName, QueryProvider query, flo
public static final ParseField QUERY = new ParseField("query");
public static final ParseField DEFAULT_SCORE = new ParseField("default_score");
public static float DEFAULT_SCORE_DEFAULT = 0f;
public static final float DEFAULT_SCORE_DEFAULT = 0f;
private static final ConstructingObjectParser<QueryExtractorBuilder, Void> PARSER = new ConstructingObjectParser<>(
NAME.getPreferredName(),


@ -94,7 +94,7 @@ public class ModelSnapshot implements ToXContentObject, Writeable {
return parser;
}
public static String EMPTY_SNAPSHOT_ID = "empty";
public static final String EMPTY_SNAPSHOT_ID = "empty";
private final String jobId;


@ -29,7 +29,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
* @see AsyncSearchResponse
*/
public class SubmitAsyncSearchRequest extends ActionRequest {
public static long MIN_KEEP_ALIVE = TimeValue.timeValueSeconds(1).millis();
public static final long MIN_KEEP_ALIVE = TimeValue.timeValueSeconds(1).millis();
private TimeValue waitForCompletionTimeout = TimeValue.timeValueSeconds(1);
private boolean keepOnCompletion = false;


@ -1198,7 +1198,7 @@ public final class Authentication implements ToXContentObject {
return FileRealmSettings.TYPE.equals(realmType) || NativeRealmSettings.TYPE.equals(realmType);
}
public static ConstructingObjectParser<RealmRef, Void> REALM_REF_PARSER = new ConstructingObjectParser<>(
public static final ConstructingObjectParser<RealmRef, Void> REALM_REF_PARSER = new ConstructingObjectParser<>(
"realm_ref",
false,
(args, v) -> new RealmRef((String) args[0], (String) args[1], (String) args[2], (RealmDomain) args[3])


@ -26,7 +26,7 @@ import java.util.Objects;
public final class AuthenticationResult<T> {
private static final AuthenticationResult<?> NOT_HANDLED = new AuthenticationResult<>(Status.CONTINUE, null, null, null, null);
public static String THREAD_CONTEXT_KEY = "_xpack_security_auth_result";
public static final String THREAD_CONTEXT_KEY = "_xpack_security_auth_result";
public enum Status {
/**


@ -261,7 +261,7 @@ public class RealmConfig {
}
}
public static ConstructingObjectParser<RealmIdentifier, Void> REALM_IDENTIFIER_PARSER = new ConstructingObjectParser<>(
public static final ConstructingObjectParser<RealmIdentifier, Void> REALM_IDENTIFIER_PARSER = new ConstructingObjectParser<>(
"realm_identifier",
false,
(args, v) -> new RealmIdentifier((String) args[0], (String) args[1])


@ -59,7 +59,7 @@ public record RealmDomain(String name, Set<RealmConfig.RealmIdentifier> realms)
}
@SuppressWarnings("unchecked")
public static ConstructingObjectParser<RealmDomain, Void> REALM_DOMAIN_PARSER = new ConstructingObjectParser<>(
public static final ConstructingObjectParser<RealmDomain, Void> REALM_DOMAIN_PARSER = new ConstructingObjectParser<>(
"realm_domain",
false,
(args, v) -> new RealmDomain((String) args[0], Set.copyOf((List<RealmConfig.RealmIdentifier>) args[1]))


@ -24,7 +24,7 @@ import java.util.Set;
public record RoleDescriptorsIntersection(Collection<Set<RoleDescriptor>> roleDescriptorsList) implements ToXContentObject, Writeable {
public static RoleDescriptorsIntersection EMPTY = new RoleDescriptorsIntersection(Collections.emptyList());
public static final RoleDescriptorsIntersection EMPTY = new RoleDescriptorsIntersection(Collections.emptyList());
private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder()
.allowRestriction(true)


@ -20,7 +20,7 @@ import java.util.function.Predicate;
public final class SystemPrivilege extends Privilege {
public static SystemPrivilege INSTANCE = new SystemPrivilege();
public static final SystemPrivilege INSTANCE = new SystemPrivilege();
private static final Predicate<String> ALLOWED_ACTIONS = StringMatcher.of(
"internal:*",


@ -76,7 +76,7 @@ public final class Automatons {
static final char WILDCARD_ESCAPE = '\\'; // Escape character
// for testing only -Dtests.jvm.argline="-Dtests.automaton.record.patterns=true"
public static boolean recordPatterns = System.getProperty("tests.automaton.record.patterns", "false").equals("true");
public static final boolean recordPatterns = System.getProperty("tests.automaton.record.patterns", "false").equals("true");
private static final Map<Automaton, List<String>> patternsMap = new HashMap<>();
private Automatons() {}


@ -20,7 +20,7 @@ import org.apache.logging.log4j.util.Supplier;
*/
public class NoOpLogger implements Logger {
public static NoOpLogger INSTANCE = new NoOpLogger();
public static final NoOpLogger INSTANCE = new NoOpLogger();
private NoOpLogger() {


@ -33,8 +33,8 @@ import static org.apache.lucene.index.IndexWriter.MAX_TERM_LENGTH;
public final class TermsEnumRequest extends BroadcastRequest<TermsEnumRequest> implements ToXContentObject {
public static final IndicesOptions DEFAULT_INDICES_OPTIONS = SearchRequest.DEFAULT_INDICES_OPTIONS;
public static int DEFAULT_SIZE = 10;
public static TimeValue DEFAULT_TIMEOUT = new TimeValue(1000);
public static final int DEFAULT_SIZE = 10;
public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(1000);
private String field;
private String string = null;


@ -99,7 +99,7 @@ public final class TransformField {
public static final String EXCLUDE_GENERATED = "exclude_generated";
// internal document id
public static String DOCUMENT_ID_FIELD = "_id";
public static final String DOCUMENT_ID_FIELD = "_id";
private TransformField() {}
}


@ -46,8 +46,8 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr
*/
public class TransformCheckpoint implements Writeable, ToXContentObject {
public static String EMPTY_NAME = "_empty";
public static TransformCheckpoint EMPTY = createEmpty(0);
public static final String EMPTY_NAME = "_empty";
public static final TransformCheckpoint EMPTY = createEmpty(0);
public static TransformCheckpoint createEmpty(long timestampMillis) {
return new TransformCheckpoint(EMPTY_NAME, timestampMillis, -1L, Collections.emptyMap(), timestampMillis);


@ -24,23 +24,23 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr
public class TransformIndexerStats extends IndexerJobStats {
public static final String NAME = "data_frame_indexer_transform_stats";
public static ParseField NUM_PAGES = new ParseField("pages_processed");
public static ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed");
public static ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("documents_indexed");
public static ParseField NUM_DELETED_DOCUMENTS = new ParseField("documents_deleted");
public static ParseField NUM_INVOCATIONS = new ParseField("trigger_count");
public static ParseField INDEX_TIME_IN_MS = new ParseField("index_time_in_ms");
public static ParseField SEARCH_TIME_IN_MS = new ParseField("search_time_in_ms");
public static ParseField PROCESSING_TIME_IN_MS = new ParseField("processing_time_in_ms");
public static ParseField DELETE_TIME_IN_MS = new ParseField("delete_time_in_ms");
public static ParseField INDEX_TOTAL = new ParseField("index_total");
public static ParseField SEARCH_TOTAL = new ParseField("search_total");
public static ParseField PROCESSING_TOTAL = new ParseField("processing_total");
public static ParseField SEARCH_FAILURES = new ParseField("search_failures");
public static ParseField INDEX_FAILURES = new ParseField("index_failures");
public static ParseField EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS = new ParseField("exponential_avg_checkpoint_duration_ms");
public static ParseField EXPONENTIAL_AVG_DOCUMENTS_INDEXED = new ParseField("exponential_avg_documents_indexed");
public static ParseField EXPONENTIAL_AVG_DOCUMENTS_PROCESSED = new ParseField("exponential_avg_documents_processed");
public static final ParseField NUM_PAGES = new ParseField("pages_processed");
public static final ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed");
public static final ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("documents_indexed");
public static final ParseField NUM_DELETED_DOCUMENTS = new ParseField("documents_deleted");
public static final ParseField NUM_INVOCATIONS = new ParseField("trigger_count");
public static final ParseField INDEX_TIME_IN_MS = new ParseField("index_time_in_ms");
public static final ParseField SEARCH_TIME_IN_MS = new ParseField("search_time_in_ms");
public static final ParseField PROCESSING_TIME_IN_MS = new ParseField("processing_time_in_ms");
public static final ParseField DELETE_TIME_IN_MS = new ParseField("delete_time_in_ms");
public static final ParseField INDEX_TOTAL = new ParseField("index_total");
public static final ParseField SEARCH_TOTAL = new ParseField("search_total");
public static final ParseField PROCESSING_TOTAL = new ParseField("processing_total");
public static final ParseField SEARCH_FAILURES = new ParseField("search_failures");
public static final ParseField INDEX_FAILURES = new ParseField("index_failures");
public static final ParseField EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS = new ParseField("exponential_avg_checkpoint_duration_ms");
public static final ParseField EXPONENTIAL_AVG_DOCUMENTS_INDEXED = new ParseField("exponential_avg_documents_indexed");
public static final ParseField EXPONENTIAL_AVG_DOCUMENTS_PROCESSED = new ParseField("exponential_avg_documents_processed");
// This changes how much "weight" past calculations have.
// The shorter the window, the less "smoothing" will occur.


@ -34,7 +34,7 @@ public class DeleteAnalyticsCollectionAction {
public static class Request extends MasterNodeRequest<Request> implements ToXContentObject {
private final String collectionName;
public static ParseField COLLECTION_NAME_FIELD = new ParseField("collection_name");
public static final ParseField COLLECTION_NAME_FIELD = new ParseField("collection_name");
public Request(StreamInput in) throws IOException {
super(in);


@ -35,7 +35,7 @@ public class GetAnalyticsCollectionAction {
public static class Request extends MasterNodeReadRequest<Request> implements ToXContentObject {
private final String[] names;
public static ParseField NAMES_FIELD = new ParseField("names");
public static final ParseField NAMES_FIELD = new ParseField("names");
public Request(TimeValue masterNodeTimeout, String[] names) {
super(masterNodeTimeout);


@ -311,7 +311,7 @@ public class PostAnalyticsEventAction {
}
public static class Response extends ActionResponse implements ToXContentObject {
public static Response ACCEPTED = new Response(true);
public static final Response ACCEPTED = new Response(true);
public static Response readFromStreamInput(StreamInput in) throws IOException {
boolean accepted = in.readBoolean();


@ -20,11 +20,11 @@ import static org.elasticsearch.common.Strings.requireNonBlank;
public class DocumentAnalyticsEventField {
public static ParseField DOCUMENT_FIELD = new ParseField("document");
public static final ParseField DOCUMENT_FIELD = new ParseField("document");
public static ParseField DOCUMENT_ID_FIELD = new ParseField("id");
public static final ParseField DOCUMENT_ID_FIELD = new ParseField("id");
public static ParseField DOCUMENT_INDEX_FIELD = new ParseField("index");
public static final ParseField DOCUMENT_INDEX_FIELD = new ParseField("index");
private static final ObjectParser<Map<String, String>, AnalyticsEvent.Context> PARSER = new ObjectParser<>(
DOCUMENT_FIELD.getPreferredName(),


@ -17,13 +17,13 @@ import java.util.HashMap;
import java.util.Map;
public class PageAnalyticsEventField {
public static ParseField PAGE_FIELD = new ParseField("page");
public static final ParseField PAGE_FIELD = new ParseField("page");
public static ParseField PAGE_URL_FIELD = new ParseField("url");
public static final ParseField PAGE_URL_FIELD = new ParseField("url");
public static ParseField PAGE_TITLE_FIELD = new ParseField("title");
public static final ParseField PAGE_TITLE_FIELD = new ParseField("title");
public static ParseField PAGE_REFERRER_FIELD = new ParseField("referrer");
public static final ParseField PAGE_REFERRER_FIELD = new ParseField("referrer");
private static final ObjectParser<Map<String, String>, AnalyticsEvent.Context> PARSER = new ObjectParser<>(
PAGE_FIELD.getPreferredName(),


@ -19,11 +19,11 @@ import java.util.Map;
public class PaginationAnalyticsEventField {
public static ParseField PAGINATION_FIELD = new ParseField("page");
public static final ParseField PAGINATION_FIELD = new ParseField("page");
public static ParseField CURRENT_PAGE_FIELD = new ParseField("current");
public static final ParseField CURRENT_PAGE_FIELD = new ParseField("current");
public static ParseField PAGE_SIZE_FIELD = new ParseField("size");
public static final ParseField PAGE_SIZE_FIELD = new ParseField("size");
private static final ObjectParser<Map<String, Integer>, AnalyticsEvent.Context> PARSER = new ObjectParser<>(
PAGINATION_FIELD.getPreferredName(),


@ -21,13 +21,13 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.S
import static org.elasticsearch.xpack.application.analytics.event.parser.field.SortOrderAnalyticsEventField.SORT_FIELD;
public class SearchAnalyticsEventField {
public static ParseField SEARCH_FIELD = new ParseField("search");
public static final ParseField SEARCH_FIELD = new ParseField("search");
public static ParseField SEARCH_QUERY_FIELD = new ParseField("query");
public static final ParseField SEARCH_QUERY_FIELD = new ParseField("query");
public static ParseField SEARCH_APPLICATION_FIELD = new ParseField("search_application");
public static final ParseField SEARCH_APPLICATION_FIELD = new ParseField("search_application");
public static ParseField SEARCH_RESULTS_FIELD = new ParseField("results");
public static final ParseField SEARCH_RESULTS_FIELD = new ParseField("results");
private static final ObjectParser<Map<String, Object>, AnalyticsEvent.Context> PARSER = new ObjectParser<>(
SEARCH_FIELD.getPreferredName(),


@ -19,7 +19,7 @@ import java.util.List;
import java.util.Map;
public class SearchFiltersAnalyticsEventField {
public static ParseField SEARCH_FILTERS_FIELD = new ParseField("filters");
public static final ParseField SEARCH_FILTERS_FIELD = new ParseField("filters");
private static final ObjectParser<Map<String, List<String>>, AnalyticsEvent.Context> PARSER = new ObjectParser<>(
SEARCH_FILTERS_FIELD.getPreferredName(),


@ -20,9 +20,9 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.D
import static org.elasticsearch.xpack.application.analytics.event.parser.field.PageAnalyticsEventField.PAGE_FIELD;
public class SearchResultAnalyticsEventField {
public static ParseField SEARCH_RESULTS_TOTAL_FIELD = new ParseField("total_results");
public static final ParseField SEARCH_RESULTS_TOTAL_FIELD = new ParseField("total_results");
public static ParseField SEARCH_RESULT_ITEMS_FIELD = new ParseField("items");
public static final ParseField SEARCH_RESULT_ITEMS_FIELD = new ParseField("items");
private static final ObjectParser<Map<String, Object>, AnalyticsEvent.Context> PARSER = new ObjectParser<>(
"search_results",


@ -21,9 +21,9 @@ import java.util.Map;
import static org.elasticsearch.common.Strings.requireNonBlank;
public class SessionAnalyticsEventField {
public static ParseField SESSION_FIELD = new ParseField("session");
public static final ParseField SESSION_FIELD = new ParseField("session");
public static ParseField SESSION_ID_FIELD = new ParseField("id");
public static final ParseField SESSION_ID_FIELD = new ParseField("id");
public static final ParseField CLIENT_ADDRESS_FIELD = new ParseField("ip");

Some files were not shown because too many files have changed in this diff.