Mirror of https://github.com/elastic/elasticsearch.git, synced 2025-04-24 23:27:25 -04:00
Collapse transport versions for 8.17.0 (#124005)
parent 152d086c0f
commit 22a7b5ea12
26 changed files with 48 additions and 102 deletions
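The change is mechanical throughout: each per-feature transport version constant that shipped as part of 8.17.0 is deleted from TransportVersions, and every wire-format guard that referenced it is rewritten against the collapsed release constant V_8_17_0. A minimal sketch of the recurring before/after shape, using an illustrative Example writeable that is not part of the commit:

import java.io.IOException;

import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

// Illustrative only: a field whose wire format first appeared in 8.17.0.
class Example implements Writeable {
    final long newField;

    Example(StreamInput in) throws IOException {
        // Before this commit the guard named the feature constant, e.g. onOrAfter(TransportVersions.SOME_8_17_FEATURE);
        // after the collapse the same condition is expressed against the release constant.
        newField = in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0) ? in.readVLong() : 0L;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
            out.writeVLong(newField);
        }
    }
}

The hunks that follow are all instances of this substitution, plus a handful of guards that simplify once the backported and main-line constants coincide.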
@@ -103,31 +103,7 @@ public class TransportVersions {
     public static final TransportVersion INITIAL_ELASTICSEARCH_8_16_6 = def(8_772_0_06);
     public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_0_00);
     public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_0_00);
-    public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_0_00);
-    public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_0_00);
-    public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_0_00);
-    public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_0_00);
-    public static final TransportVersion INDEX_MODE_LOOKUP = def(8_779_0_00);
-    public static final TransportVersion INDEX_REQUEST_REMOVE_METERING = def(8_780_0_00);
-    public static final TransportVersion CPU_STAT_STRING_PARSING = def(8_781_0_00);
-    public static final TransportVersion QUERY_RULES_RETRIEVER = def(8_782_0_00);
-    public static final TransportVersion ESQL_CCS_EXEC_INFO_WITH_FAILURES = def(8_783_0_00);
-    public static final TransportVersion LOGSDB_TELEMETRY = def(8_784_0_00);
-    public static final TransportVersion LOGSDB_TELEMETRY_STATS = def(8_785_0_00);
     public static final TransportVersion KQL_QUERY_ADDED = def(8_786_0_00);
-    public static final TransportVersion ROLE_MONITOR_STATS = def(8_787_0_00);
-    public static final TransportVersion DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK = def(8_788_0_00);
-    public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO = def(8_789_0_00);
-    public static final TransportVersion VERTEX_AI_INPUT_TYPE_ADDED = def(8_790_0_00);
-    public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE = def(8_791_0_00);
-    public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_0_00);
-    public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_0_00);
-    public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_0_00);
     public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_0_00);
-    public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_0_00);
-    public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_0_00);
-    public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17 = def(8_797_0_01);
-    public static final TransportVersion SOURCE_MODE_TELEMETRY_FIX_8_17 = def(8_797_0_02);
+    public static final TransportVersion V_8_17_0 = def(8_797_0_02);
     public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_3 = def(8_797_0_03);
     public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_4 = def(8_797_0_04);
     public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_0_00);
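The collapsed constant takes over the numeric id of the last entry it replaces (SOURCE_MODE_TELEMETRY_FIX_8_17 and V_8_17_0 are both def(8_797_0_02)), which is what keeps the rewritten onOrAfter and isPatchFrom checks in the following hunks wire-compatible. A hedged sanity check, not taken from the repository, assuming TransportVersion exposes its numeric id via id():

import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;

// Hypothetical check: the release constant carries the same wire id as the constant it replaced,
// so comparisons against it order identically for released versions.
public class CollapseSanityCheck {
    public static void main(String[] args) {
        TransportVersion v17 = TransportVersions.V_8_17_0;
        if (v17.id() != 8_797_0_02) {
            throw new AssertionError("unexpected transport version id: " + v17.id());
        }
    }
}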
@@ -66,8 +66,7 @@ public class NodeInfo extends BaseNodeResponse {
         super(in);
         if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) {
             version = in.readString();
-            if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_1)
-                || in.getTransportVersion().onOrAfter(TransportVersions.ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO)) {
+            if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_1)) {
                 compatibilityVersions = CompatibilityVersions.readVersion(in);
             } else {
                 compatibilityVersions = new CompatibilityVersions(TransportVersion.readVersion(in), Map.of()); // unknown mappings versions

@@ -252,8 +251,7 @@ public class NodeInfo extends BaseNodeResponse {
         } else {
             Version.writeVersion(Version.fromString(version), out);
         }
-        if (out.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_1)
-            || out.getTransportVersion().onOrAfter(TransportVersions.ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO)) {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_1)) {
             compatibilityVersions.writeTo(out);
         } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) {
             TransportVersion.writeVersion(compatibilityVersions.transportVersion(), out);
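The NodeInfo hunks show the other recurring shape: a guard that accepted either the 8.16.1 backport (isPatchFrom(V_8_16_1)) or the main-line feature constant collapses to a single onOrAfter(V_8_16_1) check, on the assumption that once 8.17.0 exists the in-between main-line ids are pre-release only and are never negotiated with released peers. A sketch of the two forms, with illustrative helper names that do not appear in the commit:

import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;

// Illustrative helpers, not from the repository: the same BWC condition before and after the collapse.
final class BackportGuards {

    // Before: the feature landed on main and was also backported to the 8.16.1 patch release.
    static boolean beforeCollapse(TransportVersion v, TransportVersion featureConstant) {
        return v.isPatchFrom(TransportVersions.V_8_16_1) || v.onOrAfter(featureConstant);
    }

    // After: with the feature constant folded into V_8_17_0, every released version that
    // satisfies the old condition is at or after 8.16.1, so one check suffices.
    static boolean afterCollapse(TransportVersion v) {
        return v.onOrAfter(TransportVersions.V_8_16_1);
    }
}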
@@ -291,8 +291,7 @@ public final class MappingStats implements ToXContentFragment, Writeable {
     }

     private static boolean canReadOrWriteSourceModeTelemetry(TransportVersion version) {
-        return version.isPatchFrom(TransportVersions.SOURCE_MODE_TELEMETRY_FIX_8_17)
-            || version.onOrAfter(TransportVersions.SOURCE_MODE_TELEMETRY);
+        return version.isPatchFrom(TransportVersions.V_8_17_0) || version.onOrAfter(TransportVersions.SOURCE_MODE_TELEMETRY);
     }

     private static OptionalLong ofNullable(Long l) {
@@ -55,15 +55,7 @@ public class IndicesStatsResponse extends ChunkedBroadcastResponse {
     IndicesStatsResponse(StreamInput in) throws IOException {
         super(in);
         shards = in.readArray(ShardStats::new, ShardStats[]::new);
-        if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS_REVERT)) {
-            indexHealthMap = in.readMap(ClusterHealthStatus::readFrom);
-            indexStateMap = in.readMap(IndexMetadata.State::readFrom);
-        } else if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) {
-            indexHealthMap = in.readMap(ClusterHealthStatus::readFrom);
-            indexStateMap = in.readMap(IndexMetadata.State::readFrom);
-            in.readMap(StreamInput::readStringCollectionAsList); // unused, reverted
-            in.readMap(StreamInput::readLong); // unused, reverted
-        } else if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
+        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
             // Between 8.1 and INDEX_STATS_ADDITIONAL_FIELDS, we had a different format for the response
             // where we only had health and state available.
             indexHealthMap = in.readMap(ClusterHealthStatus::readFrom);

@@ -186,15 +178,7 @@ public class IndicesStatsResponse extends ChunkedBroadcastResponse {
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
         out.writeArray(shards);
-        if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS_REVERT)) {
-            out.writeMap(indexHealthMap, StreamOutput::writeWriteable);
-            out.writeMap(indexStateMap, StreamOutput::writeWriteable);
-        } else if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) {
-            out.writeMap(indexHealthMap, StreamOutput::writeWriteable);
-            out.writeMap(indexStateMap, StreamOutput::writeWriteable);
-            out.writeMap(Map.of(), StreamOutput::writeStringCollection);
-            out.writeMap(Map.of(), StreamOutput::writeLong);
-        } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
             out.writeMap(indexHealthMap, StreamOutput::writeWriteable);
             out.writeMap(indexStateMap, StreamOutput::writeWriteable);
         }
@@ -142,7 +142,7 @@ public class SimulateBulkRequest extends BulkRequest {
             componentTemplateSubstitutions = Map.of();
             indexTemplateSubstitutions = Map.of();
         }
-        if (in.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_MAPPING_ADDITION)) {
+        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
             this.mappingAddition = (Map<String, Object>) in.readGenericValue();
         } else {
             mappingAddition = Map.of();

@@ -157,7 +157,7 @@ public class SimulateBulkRequest extends BulkRequest {
             out.writeGenericValue(componentTemplateSubstitutions);
             out.writeGenericValue(indexTemplateSubstitutions);
         }
-        if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_MAPPING_ADDITION)) {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
             out.writeGenericValue(mappingAddition);
         }
     }
@@ -203,7 +203,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
             requireDataStream = false;
         }

-        if (in.getTransportVersion().before(TransportVersions.INDEX_REQUEST_REMOVE_METERING)) {
+        if (in.getTransportVersion().before(TransportVersions.V_8_17_0)) {
             if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
                 in.readZLong(); // obsolete normalisedBytesParsed
             }

@@ -803,7 +803,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
             out.writeBoolean(requireDataStream);
         }

-        if (out.getTransportVersion().before(TransportVersions.INDEX_REQUEST_REMOVE_METERING)) {
+        if (out.getTransportVersion().before(TransportVersions.V_8_17_0)) {
             if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
                 out.writeZLong(-1); // obsolete normalisedBytesParsed
             }
@@ -911,7 +911,7 @@ public record IndicesOptions(
         }
         if (out.getTransportVersion()
             .between(TransportVersions.V_8_16_0, TransportVersions.REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX)) {
-            if (out.getTransportVersion().before(TransportVersions.INTRODUCE_ALL_APPLICABLE_SELECTOR)) {
+            if (out.getTransportVersion().before(TransportVersions.V_8_17_0)) {
                 out.writeVInt(1); // Enum set sized 1
                 out.writeVInt(0); // ordinal 0 (::data selector)
             } else {

@@ -955,7 +955,7 @@ public record IndicesOptions(
         if (in.getTransportVersion()
             .between(TransportVersions.V_8_16_0, TransportVersions.REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX)) {
             // Reading from an older node, which will be sending either an enum set or a single byte that needs to be read out and ignored.
-            if (in.getTransportVersion().before(TransportVersions.INTRODUCE_ALL_APPLICABLE_SELECTOR)) {
+            if (in.getTransportVersion().before(TransportVersions.V_8_17_0)) {
                 int size = in.readVInt();
                 for (int i = 0; i < size; i++) {
                     in.readVInt();
@@ -587,7 +587,7 @@ public enum IndexMode {
             case STANDARD -> 0;
             case TIME_SERIES -> 1;
             case LOGSDB -> 2;
-            case LOOKUP -> out.getTransportVersion().onOrAfter(TransportVersions.INDEX_MODE_LOOKUP) ? 3 : 0;
+            case LOOKUP -> out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0) ? 3 : 0;
         };
         out.writeByte((byte) code);
     }
@@ -168,7 +168,7 @@ public final class PipelineConfiguration implements SimpleDiffable<PipelineConfi
     public static PipelineConfiguration readFrom(StreamInput in) throws IOException {
         final String id = in.readString();
         final Map<String, Object> config;
-        if (in.getTransportVersion().onOrAfter(TransportVersions.INGEST_PIPELINE_CONFIGURATION_AS_MAP)) {
+        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
            config = in.readGenericMap();
         } else {
             final BytesReference bytes = in.readSlicedBytesReference();

@@ -190,7 +190,7 @@ public final class PipelineConfiguration implements SimpleDiffable<PipelineConfi
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(id);
-        if (out.getTransportVersion().onOrAfter(TransportVersions.INGEST_PIPELINE_CONFIGURATION_AS_MAP)) {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
             out.writeGenericMap(config);
         } else {
             XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).prettyPrint();
@@ -488,7 +488,7 @@ public class OsStats implements Writeable, ToXContentFragment {

         Cgroup(final StreamInput in) throws IOException {
             cpuAcctControlGroup = in.readString();
-            if (in.getTransportVersion().onOrAfter(TransportVersions.CPU_STAT_STRING_PARSING)) {
+            if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
                 cpuAcctUsageNanos = in.readBigInteger();
             } else {
                 cpuAcctUsageNanos = BigInteger.valueOf(in.readLong());

@@ -505,7 +505,7 @@ public class OsStats implements Writeable, ToXContentFragment {
         @Override
         public void writeTo(final StreamOutput out) throws IOException {
             out.writeString(cpuAcctControlGroup);
-            if (out.getTransportVersion().onOrAfter(TransportVersions.CPU_STAT_STRING_PARSING)) {
+            if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
                 out.writeBigInteger(cpuAcctUsageNanos);
             } else {
                 out.writeLong(cpuAcctUsageNanos.longValue());

@@ -605,7 +605,7 @@ public class OsStats implements Writeable, ToXContentFragment {
         }

         CpuStat(final StreamInput in) throws IOException {
-            if (in.getTransportVersion().onOrAfter(TransportVersions.CPU_STAT_STRING_PARSING)) {
+            if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
                 numberOfElapsedPeriods = in.readBigInteger();
                 numberOfTimesThrottled = in.readBigInteger();
                 timeThrottledNanos = in.readBigInteger();

@@ -618,7 +618,7 @@ public class OsStats implements Writeable, ToXContentFragment {

         @Override
         public void writeTo(final StreamOutput out) throws IOException {
-            if (out.getTransportVersion().onOrAfter(TransportVersions.CPU_STAT_STRING_PARSING)) {
+            if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
                 out.writeBigInteger(numberOfElapsedPeriods);
                 out.writeBigInteger(numberOfTimesThrottled);
                 out.writeBigInteger(timeThrottledNanos);
@@ -288,8 +288,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
         if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) {
             rankBuilder = in.readOptionalNamedWriteable(RankBuilder.class);
         }
-        if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_1)
-            || in.getTransportVersion().onOrAfter(TransportVersions.SKIP_INNER_HITS_SEARCH_SOURCE)) {
+        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_1)) {
             skipInnerHits = in.readBoolean();
         } else {
             skipInnerHits = false;

@@ -383,8 +382,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
         } else if (rankBuilder != null) {
             throw new IllegalArgumentException("cannot serialize [rank] to version [" + out.getTransportVersion().toReleaseVersion() + "]");
         }
-        if (out.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_1)
-            || out.getTransportVersion().onOrAfter(TransportVersions.SKIP_INNER_HITS_SEARCH_SOURCE)) {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_1)) {
             out.writeBoolean(skipInnerHits);
         }
     }
@@ -28,7 +28,7 @@ public final class LogsDBFeatureSetUsage extends XPackFeatureUsage {
         super(input);
         indicesCount = input.readVInt();
         indicesWithSyntheticSource = input.readVInt();
-        if (input.getTransportVersion().onOrAfter(TransportVersions.LOGSDB_TELEMETRY_STATS)) {
+        if (input.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
             numDocs = input.readVLong();
             sizeInBytes = input.readVLong();
         } else {

@@ -36,7 +36,7 @@ public final class LogsDBFeatureSetUsage extends XPackFeatureUsage {
             sizeInBytes = 0;
         }
         var transportVersion = input.getTransportVersion();
-        if (transportVersion.isPatchFrom(TransportVersions.LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17)
+        if (transportVersion.isPatchFrom(TransportVersions.V_8_17_0)
             || transportVersion.onOrAfter(TransportVersions.LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE)) {
             hasCustomCutoffDate = input.readBoolean();
         } else {

@@ -49,12 +49,12 @@ public final class LogsDBFeatureSetUsage extends XPackFeatureUsage {
         super.writeTo(out);
         out.writeVInt(indicesCount);
         out.writeVInt(indicesWithSyntheticSource);
-        if (out.getTransportVersion().onOrAfter(TransportVersions.LOGSDB_TELEMETRY_STATS)) {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
             out.writeVLong(numDocs);
             out.writeVLong(sizeInBytes);
         }
         var transportVersion = out.getTransportVersion();
-        if (transportVersion.isPatchFrom(TransportVersions.LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17)
+        if (transportVersion.isPatchFrom(TransportVersions.V_8_17_0)
             || transportVersion.onOrAfter(TransportVersions.LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE)) {
             out.writeBoolean(hasCustomCutoffDate);
         }

@@ -79,7 +79,7 @@ public final class LogsDBFeatureSetUsage extends XPackFeatureUsage {

     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.LOGSDB_TELEMETRY;
+        return TransportVersions.V_8_17_0;
     }

     @Override
@@ -62,8 +62,7 @@ public class GetInferenceModelAction extends ActionType<GetInferenceModelAction.
             super(in);
             this.inferenceEntityId = in.readString();
             this.taskType = TaskType.fromStream(in);
-            if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ)
-                || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_0)) {
+            if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
                 this.persistDefaultConfig = in.readBoolean();
             } else {
                 this.persistDefaultConfig = PERSIST_DEFAULT_CONFIGS;

@@ -88,8 +87,7 @@ public class GetInferenceModelAction extends ActionType<GetInferenceModelAction.
             super.writeTo(out);
             out.writeString(inferenceEntityId);
             taskType.writeTo(out);
-            if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ)
-                || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_0)) {
+            if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
                 out.writeBoolean(this.persistDefaultConfig);
             }
         }
@@ -32,8 +32,6 @@ import java.util.Set;
 import java.util.TreeSet;
 import java.util.stream.Collectors;

-import static org.elasticsearch.TransportVersions.ROLE_MONITOR_STATS;
-
 /**
  * Represents the set of permissions for remote clusters. This is intended to be the model for both the {@link RoleDescriptor}
  * and {@link Role}. This model is intended to be converted to local cluster permissions

@@ -71,6 +69,7 @@ import static org.elasticsearch.TransportVersions.ROLE_MONITOR_STATS;
 public class RemoteClusterPermissions implements NamedWriteable, ToXContentObject {

     public static final TransportVersion ROLE_REMOTE_CLUSTER_PRIVS = TransportVersions.V_8_15_0;
+    public static final TransportVersion ROLE_MONITOR_STATS = TransportVersions.V_8_17_0;

     public static final String NAME = "remote_cluster_permissions";
     private static final Logger logger = LogManager.getLogger(RemoteClusterPermissions.class);
@@ -44,10 +44,10 @@ import java.util.function.Consumer;
 import java.util.stream.Collectors;

 import static java.util.Map.entry;
-import static org.elasticsearch.TransportVersions.ROLE_MONITOR_STATS;
 import static org.elasticsearch.xpack.core.security.authc.Authentication.VERSION_API_KEY_ROLES_AS_BYTES;
 import static org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo;
 import static org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfoTests.randomRoleDescriptorsIntersection;
+import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_MONITOR_STATS;
 import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS;
 import static org.hamcrest.Matchers.anEmptyMap;
 import static org.hamcrest.Matchers.containsString;
@@ -31,7 +31,7 @@ import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;

-import static org.elasticsearch.TransportVersions.ROLE_MONITOR_STATS;
+import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_MONITOR_STATS;
 import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS;
 import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.lastTransportVersionPermission;
 import static org.hamcrest.Matchers.containsString;
@@ -67,8 +67,7 @@ public class DeprecationInfoAction extends ActionType<DeprecationInfoAction.Resp
             if (in.getTransportVersion().before(TransportVersions.RESOURCE_DEPRECATION_CHECKS)) {
                 mutableResourceDeprecations.put(IndexDeprecationChecker.NAME, in.readMapOfLists(DeprecationIssue::new));
             }
-            if (in.getTransportVersion()
-                .between(TransportVersions.DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK, TransportVersions.RESOURCE_DEPRECATION_CHECKS)) {
+            if (in.getTransportVersion().between(TransportVersions.V_8_17_0, TransportVersions.RESOURCE_DEPRECATION_CHECKS)) {
                 mutableResourceDeprecations.put(DataStreamDeprecationChecker.NAME, in.readMapOfLists(DeprecationIssue::new));
             }
             if (in.getTransportVersion().before(TransportVersions.V_7_11_0)) {

@@ -140,8 +139,7 @@ public class DeprecationInfoAction extends ActionType<DeprecationInfoAction.Resp
             if (out.getTransportVersion().before(TransportVersions.RESOURCE_DEPRECATION_CHECKS)) {
                 out.writeMap(getIndexSettingsIssues(), StreamOutput::writeCollection);
             }
-            if (out.getTransportVersion()
-                .between(TransportVersions.DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK, TransportVersions.RESOURCE_DEPRECATION_CHECKS)) {
+            if (out.getTransportVersion().between(TransportVersions.V_8_17_0, TransportVersions.RESOURCE_DEPRECATION_CHECKS)) {
                 out.writeMap(getDataStreamDeprecationIssues(), StreamOutput::writeCollection);
             }
             if (out.getTransportVersion().before(TransportVersions.V_7_11_0)) {
@@ -68,8 +68,7 @@ public class QueryRulesetListItem implements Writeable, ToXContentObject {
             this.criteriaTypeToCountMap = Map.of();
         }
         TransportVersion streamTransportVersion = in.getTransportVersion();
-        if (streamTransportVersion.isPatchFrom(TransportVersions.V_8_16_1)
-            || streamTransportVersion.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) {
+        if (streamTransportVersion.onOrAfter(TransportVersions.V_8_16_1)) {
             this.ruleTypeToCountMap = in.readMap(m -> in.readEnum(QueryRule.QueryRuleType.class), StreamInput::readInt);
         } else {
             this.ruleTypeToCountMap = Map.of();

@@ -103,8 +102,7 @@ public class QueryRulesetListItem implements Writeable, ToXContentObject {
             out.writeMap(criteriaTypeToCountMap, StreamOutput::writeEnum, StreamOutput::writeInt);
         }
         TransportVersion streamTransportVersion = out.getTransportVersion();
-        if (streamTransportVersion.isPatchFrom(TransportVersions.V_8_16_1)
-            || streamTransportVersion.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) {
+        if (streamTransportVersion.onOrAfter(TransportVersions.V_8_16_1)) {
             out.writeMap(ruleTypeToCountMap, StreamOutput::writeEnum, StreamOutput::writeInt);
         }
     }
@@ -121,6 +121,6 @@ public class RuleQueryRankDoc extends RankDoc {

     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.QUERY_RULES_RETRIEVER;
+        return TransportVersions.V_8_17_0;
     }
 }
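RuleQueryRankDoc was introduced alongside the 8.17 query-rules retriever work (its previous minimum was QUERY_RULES_RETRIEVER), so its minimal wire version simply becomes the release constant. For context, getMinimalSupportedVersion is the hook defined by VersionedNamedWriteable: declaring V_8_17_0 there signals to the stream layer that the object must not be sent to nodes on older transport versions. A hedged sketch of that contract, with an illustrative class name:

import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.VersionedNamedWriteable;

// Illustrative only: a writeable introduced in 8.17.0 declares the collapsed release constant
// as its minimal wire version instead of a per-feature constant.
abstract class ExampleRankDoc implements VersionedNamedWriteable {

    @Override
    public TransportVersion getMinimalSupportedVersion() {
        return TransportVersions.V_8_17_0;
    }
}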
@@ -59,7 +59,7 @@ public class ListQueryRulesetsActionResponseBWCSerializingTests extends Abstract
         ListQueryRulesetsAction.Response instance,
         TransportVersion version
     ) {
-        if (version.isPatchFrom(TransportVersions.V_8_16_1) || version.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) {
+        if (version.onOrAfter(TransportVersions.V_8_16_1)) {
             return instance;
         } else if (version.onOrAfter(QueryRulesetListItem.EXPANDED_RULESET_COUNT_TRANSPORT_VERSION)) {
             List<QueryRulesetListItem> updatedResults = new ArrayList<>();
@@ -148,7 +148,7 @@ public class FieldAttribute extends TypedAttribute {
     }

     private void writeParentName(StreamOutput out) throws IOException {
-        if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
             ((PlanStreamOutput) out).writeOptionalCachedString(parentName);
         } else {
             // Previous versions only used the parent field attribute to retrieve the parent's name, so we can use just any

@@ -159,7 +159,7 @@ public class FieldAttribute extends TypedAttribute {
     }

     private static String readParentName(StreamInput in) throws IOException {
-        if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) {
+        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
             return ((PlanStreamInput) in).readOptionalCachedString();
         }

@@ -424,7 +424,7 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable {
             this.failedShards = in.readOptionalInt();
             this.took = in.readOptionalTimeValue();
             this.skipUnavailable = in.readBoolean();
-            if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_CCS_EXEC_INFO_WITH_FAILURES)) {
+            if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
                 this.failures = Collections.unmodifiableList(in.readCollectionAsList(ShardSearchFailure::readShardSearchFailure));
             } else {
                 this.failures = Collections.emptyList();

@@ -442,7 +442,7 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable {
             out.writeOptionalInt(failedShards);
             out.writeOptionalTimeValue(took);
             out.writeBoolean(skipUnavailable);
-            if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_CCS_EXEC_INFO_WITH_FAILURES)) {
+            if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
                 out.writeCollection(failures);
             }
         }
@@ -211,7 +211,7 @@ public class EnrichLookupService extends AbstractLookupService<EnrichLookupServi
             PlanStreamInput planIn = new PlanStreamInput(in, in.namedWriteableRegistry(), null);
             List<NamedExpression> extractFields = planIn.readNamedWriteableCollectionAsList(NamedExpression.class);
             var source = Source.EMPTY;
-            if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) {
+            if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
                 source = Source.readFrom(planIn);
             }
             TransportRequest result = new TransportRequest(

@@ -242,7 +242,7 @@ public class EnrichLookupService extends AbstractLookupService<EnrichLookupServi
             out.writeWriteable(inputPage);
             PlanStreamOutput planOut = new PlanStreamOutput(out, null);
             planOut.writeNamedWriteableCollection(extractFields);
-            if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) {
+            if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
                 source.writeTo(planOut);
             }
         }
@@ -149,7 +149,7 @@ public class LookupFromIndexService extends AbstractLookupService<LookupFromInde
             List<NamedExpression> extractFields = planIn.readNamedWriteableCollectionAsList(NamedExpression.class);
             String matchField = in.readString();
             var source = Source.EMPTY;
-            if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) {
+            if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
                 source = Source.readFrom(planIn);
             }
             // Source.readFrom() requires the query from the Configuration passed to PlanStreamInput.

@@ -182,7 +182,7 @@ public class LookupFromIndexService extends AbstractLookupService<LookupFromInde
             PlanStreamOutput planOut = new PlanStreamOutput(out, null);
             planOut.writeNamedWriteableCollection(extractFields);
             out.writeString(matchField);
-            if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) {
+            if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
                 source.writeTo(planOut);
             }
             if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_LOOKUP_JOIN_SOURCE_TEXT)) {
@@ -107,9 +107,7 @@ public class GoogleVertexAiEmbeddingsTaskSettings implements TaskSettings {
     public GoogleVertexAiEmbeddingsTaskSettings(StreamInput in) throws IOException {
         this.autoTruncate = in.readOptionalBoolean();

-        var inputType = (in.getTransportVersion().onOrAfter(TransportVersions.VERTEX_AI_INPUT_TYPE_ADDED))
-            ? in.readOptionalEnum(InputType.class)
-            : null;
+        var inputType = (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) ? in.readOptionalEnum(InputType.class) : null;

         validateInputType(inputType);
         this.inputType = inputType;

@@ -150,7 +148,7 @@ public class GoogleVertexAiEmbeddingsTaskSettings implements TaskSettings {
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalBoolean(this.autoTruncate);

-        if (out.getTransportVersion().onOrAfter(TransportVersions.VERTEX_AI_INPUT_TYPE_ADDED)) {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
             out.writeOptionalEnum(this.inputType);
         }
     }
@@ -256,7 +256,7 @@ public class GoogleVertexAiEmbeddingsTaskSettingsTests extends AbstractBWCWireSe
         GoogleVertexAiEmbeddingsTaskSettings instance,
         TransportVersion version
     ) {
-        if (version.before(TransportVersions.VERTEX_AI_INPUT_TYPE_ADDED)) {
+        if (version.before(TransportVersions.V_8_17_0)) {
             // default to null input type if node is on a version before input type was introduced
             return new GoogleVertexAiEmbeddingsTaskSettings(instance.autoTruncate(), null);
         }