Merge main into multi-project

Tim Vernum 2025-02-22 16:42:59 +11:00
commit fd9f8e1b08
164 changed files with 3982 additions and 4231 deletions

View file

@@ -110,6 +110,8 @@ public abstract class ElasticsearchBuildCompletePlugin implements Plugin<Project
projectDirFiles.include("**/build/testrun/*/temp/**");
projectDirFiles.include("**/build/**/hs_err_pid*.log");
projectDirFiles.include("**/build/**/replay_pid*.log");
+ // core dump files are in the working directory of the installation, which is not project specific
+ projectDirFiles.include("distribution/**/build/install/*/core.*");
projectDirFiles.exclude("**/build/testclusters/**/data/**");
projectDirFiles.exclude("**/build/testclusters/**/distro/**");
projectDirFiles.exclude("**/build/testclusters/**/repo/**");

View file

@@ -0,0 +1,5 @@
pr: 121392
summary: Include data streams when converting an existing resource to a system resource
area: Infra/Core
type: bug
issues: []

View file

@@ -0,0 +1,5 @@
pr: 122390
summary: Add health indicator impact to `HealthPeriodicLogger`
area: Health
type: enhancement
issues: []

View file

@@ -0,0 +1,5 @@
pr: 122938
summary: Fix geoip databases index access after system feature migration (again)
area: Ingest Node
type: bug
issues: []

View file

@@ -0,0 +1,5 @@
pr: 123085
summary: Remove duplicated nested commands
area: ES|QL
type: bug
issues: []

View file

@@ -0,0 +1,5 @@
pr: 123155
summary: Add `ElasticInferenceServiceCompletionServiceSettings`
area: Machine Learning
type: bug
issues: []

View file

@@ -12,6 +12,7 @@
module org.elasticsearch.entitlement.bridge {
requires java.net.http;
requires jdk.net;
+ requires java.logging;
exports org.elasticsearch.entitlement.bridge;
}

View file

@@ -88,6 +88,7 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
import java.util.function.BiPredicate;
import java.util.function.Consumer;
+ import java.util.logging.FileHandler;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
@@ -882,9 +883,34 @@ public interface EntitlementChecker {
void check$java_nio_file_Files$$lines(Class<?> callerClass, Path path);
// file system providers
void check$java_nio_file_spi_FileSystemProvider$(Class<?> callerClass);
void check$java_util_logging_FileHandler$(Class<?> callerClass);
void check$java_util_logging_FileHandler$(Class<?> callerClass, String pattern);
void check$java_util_logging_FileHandler$(Class<?> callerClass, String pattern, boolean append);
void check$java_util_logging_FileHandler$(Class<?> callerClass, String pattern, int limit, int count);
void check$java_util_logging_FileHandler$(Class<?> callerClass, String pattern, int limit, int count, boolean append);
void check$java_util_logging_FileHandler$(Class<?> callerClass, String pattern, long limit, int count, boolean append);
void check$java_util_logging_FileHandler$close(Class<?> callerClass, FileHandler that);
void check$java_net_http_HttpRequest$BodyPublishers$$ofFile(Class<?> callerClass, Path path);
void check$java_net_http_HttpResponse$BodyHandlers$$ofFile(Class<?> callerClass, Path path);
void check$java_net_http_HttpResponse$BodyHandlers$$ofFile(Class<?> callerClass, Path path, OpenOption... options);
void check$java_net_http_HttpResponse$BodyHandlers$$ofFileDownload(Class<?> callerClass, Path directory, OpenOption... openOptions);
void check$java_net_http_HttpResponse$BodySubscribers$$ofFile(Class<?> callerClass, Path directory);
void check$java_net_http_HttpResponse$BodySubscribers$$ofFile(Class<?> callerClass, Path directory, OpenOption... openOptions);
void checkNewFileSystem(Class<?> callerClass, FileSystemProvider that, URI uri, Map<String, ?> env);
void checkNewFileSystem(Class<?> callerClass, FileSystemProvider that, Path path, Map<String, ?> env);

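For orientation, the checker methods above follow the bridge's instrumentation naming scheme as it appears throughout this diff: check$ plus the target class's fully qualified name with dots and nested-class separators turned into underscores, a bare trailing $ for constructors, $$name for static methods, and $name with the receiver passed as "that" for instance methods. A small illustrative excerpt, assuming only signatures that already occur in this change (the interface name and the comments are mine, not from the source):

// Illustration only: the three declarations below are copied from the diff above,
// each annotated with the JDK member it guards.
public interface EntitlementCheckerNamingSketch {

    // new java.util.logging.FileHandler(String pattern) -> constructor check, bare trailing '$'
    void check$java_util_logging_FileHandler$(Class<?> callerClass, String pattern);

    // fileHandler.close() -> instance check, receiver passed as 'that'
    void check$java_util_logging_FileHandler$close(Class<?> callerClass, java.util.logging.FileHandler that);

    // HttpRequest.BodyPublishers.ofFile(Path) -> static method check, '$$' before the method name
    void check$java_net_http_HttpRequest$BodyPublishers$$ofFile(Class<?> callerClass, java.nio.file.Path path);
}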
View file

@@ -12,6 +12,7 @@ module org.elasticsearch.entitlement.qa.entitled {
requires org.elasticsearch.entitlement;
requires org.elasticsearch.base; // SuppressForbidden
requires org.elasticsearch.logging;
+ requires java.logging;
exports org.elasticsearch.entitlement.qa.entitled; // Must be unqualified so non-modular IT tests can call us
}

View file

@@ -22,6 +22,8 @@ import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.RandomAccessFile;
+ import java.net.http.HttpRequest;
+ import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
@@ -29,10 +31,13 @@ import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.util.Scanner;
import java.util.jar.JarFile;
+ import java.util.logging.FileHandler;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;
import static java.nio.charset.Charset.defaultCharset;
+ import static java.nio.file.StandardOpenOption.CREATE;
+ import static java.nio.file.StandardOpenOption.WRITE;
import static java.util.zip.ZipFile.OPEN_DELETE;
import static java.util.zip.ZipFile.OPEN_READ;
import static org.elasticsearch.entitlement.qa.entitled.EntitledActions.createTempFileForWrite;
@@ -477,5 +482,86 @@ class FileCheckActions {
new Scanner(readFile().toFile(), "UTF-8");
}
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void fileHandler() throws IOException {
new FileHandler();
}
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void fileHandler_String() throws IOException {
new FileHandler(readFile().toString());
}
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void fileHandler_StringBoolean() throws IOException {
new FileHandler(readFile().toString(), false);
}
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void fileHandler_StringIntInt() throws IOException {
new FileHandler(readFile().toString(), 1, 2);
}
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void fileHandler_StringIntIntBoolean() throws IOException {
new FileHandler(readFile().toString(), 1, 2, false);
}
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void fileHandler_StringLongIntBoolean() throws IOException {
new FileHandler(readFile().toString(), 1L, 2, false);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void httpRequestBodyPublishersOfFile() throws IOException {
HttpRequest.BodyPublishers.ofFile(readFile());
}
@EntitlementTest(expectedAccess = PLUGINS)
static void httpResponseBodyHandlersOfFile() {
HttpResponse.BodyHandlers.ofFile(readWriteFile());
}
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void httpResponseBodyHandlersOfFile_readOnly() {
HttpResponse.BodyHandlers.ofFile(readFile());
}
@EntitlementTest(expectedAccess = PLUGINS)
static void httpResponseBodyHandlersOfFileDownload() {
HttpResponse.BodyHandlers.ofFileDownload(readWriteDir());
}
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void httpResponseBodyHandlersOfFileDownload_readOnly() {
HttpResponse.BodyHandlers.ofFileDownload(readDir());
}
@EntitlementTest(expectedAccess = PLUGINS)
static void httpResponseBodySubscribersOfFile_File() {
HttpResponse.BodySubscribers.ofFile(readWriteFile());
}
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void httpResponseBodySubscribersOfFile_File_readOnly() {
HttpResponse.BodySubscribers.ofFile(readFile());
}
@EntitlementTest(expectedAccess = PLUGINS)
static void httpResponseBodySubscribersOfFile_FileOpenOptions() {
// Note that, unlike other methods like BodyHandlers.ofFile, this is indeed
// an overload distinct from ofFile with no OpenOptions, and so it needs its
// own instrumentation and its own test.
HttpResponse.BodySubscribers.ofFile(readWriteFile(), CREATE, WRITE);
}
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void httpResponseBodySubscribersOfFile_FileOpenOptions_readOnly() {
// Note that, unlike other methods like BodyHandlers.ofFile, this is indeed
// an overload distinct from ofFile with no OpenOptions, and so it needs its
// own instrumentation and its own test.
HttpResponse.BodySubscribers.ofFile(readFile(), CREATE, WRITE);
}
private FileCheckActions() {}
}

View file

@@ -8,12 +8,13 @@
*/
module org.elasticsearch.entitlement {
+ requires org.elasticsearch.base;
requires org.elasticsearch.xcontent;
requires org.elasticsearch.logging;
requires java.instrument;
- requires org.elasticsearch.base;
+ requires java.logging;
- requires jdk.attach;
requires java.net.http;
+ requires jdk.attach;
requires jdk.net;
requires static org.elasticsearch.entitlement.bridge; // At runtime, this will be in java.base

View file

@@ -97,6 +97,7 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
import java.util.function.BiPredicate;
import java.util.function.Consumer;
+ import java.util.logging.FileHandler;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
@@ -1845,6 +1846,78 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
policyManager.checkChangeJVMGlobalState(callerClass);
}
@Override
public void check$java_util_logging_FileHandler$(Class<?> callerClass) {
policyManager.checkLoggingFileHandler(callerClass);
}
@Override
public void check$java_util_logging_FileHandler$(Class<?> callerClass, String pattern) {
policyManager.checkLoggingFileHandler(callerClass);
}
@Override
public void check$java_util_logging_FileHandler$(Class<?> callerClass, String pattern, boolean append) {
policyManager.checkLoggingFileHandler(callerClass);
}
@Override
public void check$java_util_logging_FileHandler$(Class<?> callerClass, String pattern, int limit, int count) {
policyManager.checkLoggingFileHandler(callerClass);
}
@Override
public void check$java_util_logging_FileHandler$(Class<?> callerClass, String pattern, int limit, int count, boolean append) {
policyManager.checkLoggingFileHandler(callerClass);
}
@Override
public void check$java_util_logging_FileHandler$(Class<?> callerClass, String pattern, long limit, int count, boolean append) {
policyManager.checkLoggingFileHandler(callerClass);
}
@Override
public void check$java_util_logging_FileHandler$close(Class<?> callerClass, FileHandler that) {
// Note that there's no IT test for this one, because there's no way to create
// a FileHandler. However, we have this check just in case someone does manage
// to get their hands on a FileHandler and uses close() to cause its lock file to be deleted.
policyManager.checkLoggingFileHandler(callerClass);
}
@Override
public void check$java_net_http_HttpRequest$BodyPublishers$$ofFile(Class<?> callerClass, Path path) {
policyManager.checkFileRead(callerClass, path);
}
@Override
public void check$java_net_http_HttpResponse$BodyHandlers$$ofFile(Class<?> callerClass, Path path) {
policyManager.checkFileWrite(callerClass, path);
}
@Override
public void check$java_net_http_HttpResponse$BodyHandlers$$ofFile(Class<?> callerClass, Path path, OpenOption... options) {
policyManager.checkFileWrite(callerClass, path);
}
@Override
public void check$java_net_http_HttpResponse$BodyHandlers$$ofFileDownload(
Class<?> callerClass,
Path directory,
OpenOption... openOptions
) {
policyManager.checkFileWrite(callerClass, directory);
}
@Override
public void check$java_net_http_HttpResponse$BodySubscribers$$ofFile(Class<?> callerClass, Path directory) {
policyManager.checkFileWrite(callerClass, directory);
}
@Override
public void check$java_net_http_HttpResponse$BodySubscribers$$ofFile(Class<?> callerClass, Path directory, OpenOption... openOptions) {
policyManager.checkFileWrite(callerClass, directory);
}
@Override
public void checkNewFileSystem(Class<?> callerClass, FileSystemProvider that, URI uri, Map<String, ?> env) {
policyManager.checkChangeJVMGlobalState(callerClass);

View file

@@ -49,8 +49,24 @@ public final class FileAccessTree {
readPaths.sort(String::compareTo);
writePaths.sort(String::compareTo);
- this.readPaths = readPaths.toArray(new String[0]);
+ this.readPaths = pruneSortedPaths(readPaths).toArray(new String[0]);
- this.writePaths = writePaths.toArray(new String[0]);
+ this.writePaths = pruneSortedPaths(writePaths).toArray(new String[0]);
}
private static List<String> pruneSortedPaths(List<String> paths) {
List<String> prunedReadPaths = new ArrayList<>();
if (paths.isEmpty() == false) {
String currentPath = paths.get(0);
prunedReadPaths.add(currentPath);
for (int i = 1; i < paths.size(); ++i) {
String nextPath = paths.get(i);
if (nextPath.startsWith(currentPath) == false) {
prunedReadPaths.add(nextPath);
currentPath = nextPath;
}
}
}
return prunedReadPaths;
}
public static FileAccessTree of(FilesEntitlement filesEntitlement, PathLookup pathLookup) {

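The pruning added above relies on the path lists being sorted first: after sorting, any entry that starts with an already-kept path is redundant and can be dropped, so the access tree only keeps the shortest covering prefixes. A minimal standalone sketch of that idea, with made-up sample paths that are not taken from the change:

import java.util.ArrayList;
import java.util.List;

class PrunedPathsSketch {
    // Same shape as the pruneSortedPaths method above: keep the first sorted path,
    // then skip every following entry that starts with the last kept one.
    static List<String> prune(List<String> sortedPaths) {
        List<String> kept = new ArrayList<>();
        if (sortedPaths.isEmpty() == false) {
            String current = sortedPaths.get(0);
            kept.add(current);
            for (int i = 1; i < sortedPaths.size(); ++i) {
                String next = sortedPaths.get(i);
                if (next.startsWith(current) == false) {
                    kept.add(next);
                    current = next;
                }
            }
        }
        return kept;
    }

    public static void main(String[] args) {
        // ["/data", "/data/indices", "/logs"] collapses to ["/data", "/logs"]
        System.out.println(prune(List.of("/data", "/data/indices", "/logs")));
    }
}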
View file

@@ -240,6 +240,10 @@ public class PolicyManager {
neverEntitled(callerClass, () -> walkStackForCheckMethodName().orElse("change JVM global state"));
}
public void checkLoggingFileHandler(Class<?> callerClass) {
neverEntitled(callerClass, () -> walkStackForCheckMethodName().orElse("create logging file handler"));
}
private Optional<String> walkStackForCheckMethodName() {
// Look up the check$ method to compose an informative error message.
// This way, we don't need to painstakingly describe every individual global-state change.

View file

@@ -96,6 +96,27 @@ public class FileAccessTreeTests extends ESTestCase {
assertThat(tree.canWrite(path("foo/bar")), is(true));
}
public void testPrunedPaths() {
var tree = accessTree(entitlement("foo", "read", "foo/baz", "read", "foo/bar", "read"));
assertThat(tree.canRead(path("foo")), is(true));
assertThat(tree.canWrite(path("foo")), is(false));
assertThat(tree.canRead(path("foo/bar")), is(true));
assertThat(tree.canWrite(path("foo/bar")), is(false));
assertThat(tree.canRead(path("foo/baz")), is(true));
assertThat(tree.canWrite(path("foo/baz")), is(false));
// also test a non-existent subpath
assertThat(tree.canRead(path("foo/barf")), is(true));
assertThat(tree.canWrite(path("foo/barf")), is(false));
tree = accessTree(entitlement("foo", "read", "foo/bar", "read_write"));
assertThat(tree.canRead(path("foo")), is(true));
assertThat(tree.canWrite(path("foo")), is(false));
assertThat(tree.canRead(path("foo/bar")), is(true));
assertThat(tree.canWrite(path("foo/bar")), is(true));
assertThat(tree.canRead(path("foo/baz")), is(true));
assertThat(tree.canWrite(path("foo/baz")), is(false));
}
public void testReadWithRelativePath() {
for (var dir : List.of("config", "home")) {
var tree = accessTree(entitlement(Map.of("relative_path", "foo", "mode", "read", "relative_to", dir)));

View file

@@ -123,6 +123,9 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas
// as should a normal get *
assertBusy(() -> testGetStar(List.of("my-index-00001"), maybeSecurityIndex));
// and getting data streams
assertBusy(() -> testGetDatastreams());
} else {
// after the upgrade, but before the migration, Kibana should work
assertBusy(() -> testGetStarAsKibana(List.of("my-index-00001"), maybeSecurityIndex));
@@ -130,6 +133,9 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas
// as should a normal get *
assertBusy(() -> testGetStar(List.of("my-index-00001"), maybeSecurityIndex));
// and getting data streams
assertBusy(() -> testGetDatastreams());
// migrate the system features and give the cluster a moment to settle
Request migrateSystemFeatures = new Request("POST", "/_migration/system_features");
assertOK(client().performRequest(migrateSystemFeatures));
@@ -144,6 +150,9 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas
// as should a normal get *
assertBusy(() -> testGetStar(List.of("my-index-00001"), maybeSecurityIndexReindexed));
// and getting data streams
assertBusy(() -> testGetDatastreams());
Request disableDownloader = new Request("PUT", "/_cluster/settings");
disableDownloader.setJsonEntity("""
{"persistent": {"ingest.geoip.downloader.enabled": false}}
@@ -257,4 +266,15 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas
Map<String, Object> map = responseAsMap(response);
assertThat(map.keySet(), is(new HashSet<>(indexNames)));
}
private void testGetDatastreams() throws IOException {
Request getStar = new Request("GET", "_data_stream");
getStar.setOptions(
RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE) // we don't care about warnings, just errors
);
Response response = client().performRequest(getStar);
assertOK(response);
// note: we don't actually care about the response, just that there was one and that it didn't error out on us
}
}

View file

@@ -50,6 +50,7 @@ import org.elasticsearch.ingest.IngestService;
import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
@@ -341,7 +342,7 @@ public class DatabaseNodeServiceTests extends ESTestCase {
}
SearchHits hits = SearchHits.unpooled(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f);
- SearchResponse searchResponse = new SearchResponse(hits, null, null, false, null, null, 0, null, 1, 1, 0, 1L, null, null);
+ SearchResponse searchResponse = SearchResponseUtils.successfulResponse(hits);
toRelease.add(searchResponse::decRef);
@SuppressWarnings("unchecked")
ActionFuture<SearchResponse> actionFuture = mock(ActionFuture.class);

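This test change, and the two scroll-related test changes further down, replace hand-rolled SearchResponse constructor calls with the SearchResponseUtils helpers that appear in this diff. A hedged sketch of how a test might build responses with them; only the two helper calls shown in the diff are assumed to exist, and everything else here is illustrative:

import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchResponseUtils;

class SearchResponseUtilsSketch {
    // Replaces the old long-form SearchResponse constructor for the common "all good" case.
    static SearchResponse emptySuccess() {
        SearchHits hits = SearchHits.unpooled(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0f);
        return SearchResponseUtils.successfulResponse(hits);
    }

    // Builder form with a scroll id and shard counts (matching the 5, 4, 0 used in the tests above).
    static SearchResponse scrollingSuccess(SearchHits hits, String scrollId) {
        return SearchResponseUtils.response(hits).scrollId(scrollId).shards(5, 4, 0).build();
    }
}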
View file

@@ -31,7 +31,6 @@ import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentType;
- import org.hamcrest.Matcher;
import org.junit.AssumptionViolatedException;
import java.io.IOException;
@@ -39,7 +38,6 @@ import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
- import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Stream;
@@ -48,7 +46,6 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
- import static org.hamcrest.Matchers.notANumber;
public class ScaledFloatFieldMapperTests extends NumberFieldMapperTests {
@ -382,7 +379,7 @@ public class ScaledFloatFieldMapperTests extends NumberFieldMapperTests {
if (randomBoolean()) {
Value v = generateValue();
if (v.malformedOutput == null) {
- return new SyntheticSourceExample(v.input, v.output, roundDocValues(v.output), this::mapping);
+ return new SyntheticSourceExample(v.input, v.output, this::mapping);
}
return new SyntheticSourceExample(v.input, v.malformedOutput, null, this::mapping);
}
@ -396,9 +393,7 @@ public class ScaledFloatFieldMapperTests extends NumberFieldMapperTests {
List<Object> outList = Stream.concat(outputFromDocValues.stream(), malformedOutput).toList();
Object out = outList.size() == 1 ? outList.get(0) : outList;
- List<Double> outBlockList = outputFromDocValues.stream().map(this::roundDocValues).sorted().toList();
- Object outBlock = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList;
- return new SyntheticSourceExample(in, out, outBlock, this::mapping);
+ return new SyntheticSourceExample(in, out, this::mapping);
}
private record Value(Object input, Double output, Object malformedOutput) {}
@ -442,16 +437,6 @@ public class ScaledFloatFieldMapperTests extends NumberFieldMapperTests {
return decoded;
}
- private double roundDocValues(double d) {
- // Special case due to rounding, see implementation.
- if (Math.abs(d) == Double.MAX_VALUE) {
- return d;
- }
- long encoded = Math.round(d * scalingFactor);
- return encoded * (1 / scalingFactor);
- }
private void mapping(XContentBuilder b) throws IOException {
b.field("type", "scaled_float");
b.field("scaling_factor", scalingFactor);
@ -475,14 +460,9 @@ public class ScaledFloatFieldMapperTests extends NumberFieldMapperTests {
}
}
- @Override
- protected Function<Object, Object> loadBlockExpected() {
- return v -> (Number) v;
- }
- @Override
- protected Matcher<?> blockItemMatcher(Object expected) {
- return "NaN".equals(expected) ? notANumber() : equalTo(expected);
- }
+ protected BlockReaderSupport getSupportedReaders(MapperService mapper, String loaderFieldName) {
+ assumeTrue("Disabled, tested by ScaledFloatFieldBlockLoaderTests instead", false);
+ return null;
+ }
@Override

View file

@@ -68,6 +68,7 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
+ import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskManager;
@@ -574,22 +575,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
new TotalHits(0, TotalHits.Relation.EQUAL_TO),
0
);
- SearchResponse searchResponse = new SearchResponse(
- hits,
- null,
- null,
- false,
- false,
- null,
- 1,
- scrollId(),
- 5,
- 4,
- 0,
- randomLong(),
- null,
- SearchResponse.Clusters.EMPTY
- );
+ SearchResponse searchResponse = SearchResponseUtils.response(hits).scrollId(scrollId()).shards(5, 4, 0).build();
try {
client.lastSearch.get().listener.onResponse(searchResponse);

View file

@@ -30,6 +30,7 @@ import org.elasticsearch.index.reindex.ClientScrollableHitSource;
import org.elasticsearch.index.reindex.ScrollableHitSource;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
+ import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
@@ -166,22 +167,7 @@ public class ClientScrollableHitSourceTests extends ESTestCase {
new TotalHits(0, TotalHits.Relation.EQUAL_TO),
0
);
- return new SearchResponse(
- hits,
- null,
- null,
- false,
- false,
- null,
- 1,
- randomSimpleString(random(), 1, 10),
- 5,
- 4,
- 0,
- randomLong(),
- null,
- SearchResponse.Clusters.EMPTY
- );
+ return SearchResponseUtils.response(hits).scrollId(randomSimpleString(random(), 1, 10)).shards(5, 4, 0).build();
}
private void assertSameHits(List<? extends ScrollableHitSource.Hit> actual, SearchHit[] expected) {

View file

@@ -329,72 +329,22 @@ tests:
- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
method: test {yaml=reference/troubleshooting/common-issues/disk-usage-exceeded/line_65}
issue: https://github.com/elastic/elasticsearch/issues/123094
- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
method: test {fork.ForkWithWhereSortAndLimit ASYNC}
issue: https://github.com/elastic/elasticsearch/issues/123096
- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
method: test {fork.ForkWithCommonPrefilter ASYNC}
issue: https://github.com/elastic/elasticsearch/issues/123097
- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
method: test {fork.FiveFork SYNC}
issue: https://github.com/elastic/elasticsearch/issues/123098
- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
method: test {fork.FiveFork ASYNC}
issue: https://github.com/elastic/elasticsearch/issues/123099
- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
method: test {fork.SimpleFork ASYNC}
issue: https://github.com/elastic/elasticsearch/issues/123100
- class: org.elasticsearch.xpack.esql.action.CrossClusterQueryWithPartialResultsIT
method: testPartialResults
issue: https://github.com/elastic/elasticsearch/issues/123101
- - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
- method: test {fork.ForkWithWhereSortAndLimit SYNC}
- issue: https://github.com/elastic/elasticsearch/issues/123103
+ - class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryStopIT
+ method: testStopQueryLocal
+ issue: https://github.com/elastic/elasticsearch/issues/121672
- - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
- method: test {fork.SimpleFork SYNC}
- issue: https://github.com/elastic/elasticsearch/issues/123104
+ - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
+ method: test {yaml=reference/snapshot-restore/restore-snapshot/line_408}
+ issue: https://github.com/elastic/elasticsearch/issues/123192
- - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
- method: test {fork.ForkWithWhereSortDescAndLimit SYNC}
- issue: https://github.com/elastic/elasticsearch/issues/123107
+ - class: org.elasticsearch.xpack.ilm.actions.SearchableSnapshotActionIT
+ method: testRestoredIndexManagedByLocalPolicySkipsIllegalActions
+ issue: https://github.com/elastic/elasticsearch/issues/123202
- - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
- method: test {fork.ForkWithWhereSortDescAndLimit ASYNC}
- issue: https://github.com/elastic/elasticsearch/issues/123108
+ - class: org.elasticsearch.xpack.ilm.TimeSeriesLifecycleActionsIT
+ method: testHistoryIsWrittenWithFailure
+ issue: https://github.com/elastic/elasticsearch/issues/123203
- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
method: test {fork.ForkWithCommonPrefilter SYNC}
issue: https://github.com/elastic/elasticsearch/issues/123109
- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
method: test {p0=esql/40_tsdb/to_string aggregate_metric_double}
issue: https://github.com/elastic/elasticsearch/issues/123116
- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
method: test {fork.ForkWithCommonPrefilter}
issue: https://github.com/elastic/elasticsearch/issues/123117
- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
method: test {fork.SimpleFork}
issue: https://github.com/elastic/elasticsearch/issues/123118
- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
method: test {fork.FiveFork}
issue: https://github.com/elastic/elasticsearch/issues/123119
- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
method: test {fork.ForkWithWhereSortDescAndLimit}
issue: https://github.com/elastic/elasticsearch/issues/123120
- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
method: test {p0=esql/46_downsample/Render stats from downsampled index}
issue: https://github.com/elastic/elasticsearch/issues/123122
- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
method: test {p0=esql/40_unsupported_types/unsupported}
issue: https://github.com/elastic/elasticsearch/issues/123123
- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
method: test {p0=esql/40_tsdb/render aggregate_metric_double when missing min and max}
issue: https://github.com/elastic/elasticsearch/issues/123124
- class: org.elasticsearch.index.mapper.extras.ScaledFloatFieldMapperTests
method: testBlockLoaderFromRowStrideReader
issue: https://github.com/elastic/elasticsearch/issues/123126
- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
method: test {p0=esql/40_tsdb/render aggregate_metric_double when missing value}
issue: https://github.com/elastic/elasticsearch/issues/123130
- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
method: test {fork.ForkWithWhereSortAndLimit}
issue: https://github.com/elastic/elasticsearch/issues/123131
# Examples:
#

View file

@@ -12,16 +12,13 @@ package org.elasticsearch.search;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.nio.entity.NStringEntity;
- import org.apache.lucene.search.TotalHits;
import org.elasticsearch.TransportVersion;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.search.SearchRequest;
- import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchShardsRequest;
import org.elasticsearch.action.search.SearchShardsResponse;
- import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.action.search.TransportSearchAction;
import org.elasticsearch.action.search.TransportSearchShardsAction;
import org.elasticsearch.client.Request;
@@ -33,11 +30,11 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.node.VersionInformation;
import org.elasticsearch.common.Strings;
+ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.ThreadContext;
- import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.test.cluster.ElasticsearchCluster;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.ObjectPath;
@@ -102,21 +99,8 @@ public class CrossClusterSearchUnavailableClusterIT extends ESRestTestCase {
EsExecutors.DIRECT_EXECUTOR_SERVICE,
SearchRequest::new,
(request, channel, task) -> {
- var searchResponse = new SearchResponse(
- SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN),
- InternalAggregations.EMPTY,
- null,
- false,
- null,
- null,
- 1,
- null,
- 1,
- 1,
- 0,
- 100,
- ShardSearchFailure.EMPTY_ARRAY,
- SearchResponse.Clusters.EMPTY
+ var searchResponse = SearchResponseUtils.successfulResponse(
+ SearchHits.empty(Lucene.TOTAL_HITS_EQUAL_TO_ZERO, Float.NaN)
);
try {
channel.sendResponse(searchResponse);

View file

@@ -188,6 +188,7 @@ public class TransportVersions {
public static final TransportVersion REMOVE_DESIRED_NODE_VERSION_90 = def(9_000_0_03);
public static final TransportVersion ESQL_DRIVER_TASK_DESCRIPTION_90 = def(9_000_0_04);
public static final TransportVersion REMOVE_ALL_APPLICABLE_SELECTOR_9_0 = def(9_000_0_05);
+ public static final TransportVersion BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_90 = def(9_000_0_06);
public static final TransportVersion COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED = def(9_001_0_00);
public static final TransportVersion REMOVE_SNAPSHOT_FAILURES = def(9_002_0_00);
public static final TransportVersion TRANSPORT_STATS_HANDLING_TIME_REQUIRED = def(9_003_0_00);
@ -202,6 +203,8 @@ public class TransportVersions {
public static final TransportVersion REMOVE_REPOSITORY_CONFLICT_MESSAGE = def(9_012_0_00);
public static final TransportVersion RERANKER_FAILURES_ALLOWED = def(9_013_0_00);
public static final TransportVersion VOYAGE_AI_INTEGRATION_ADDED = def(9_014_0_00);
+ public static final TransportVersion BYTE_SIZE_VALUE_ALWAYS_USES_BYTES = def(9_015_0_00);
/*
* WARNING: DO NOT MERGE INTO MAIN!
* This is the transport version used for all multi-project changes.

View file

@@ -301,6 +301,13 @@ public final class DataStream implements SimpleDiffable<DataStream>, ToXContentO
return failureIndices.containsIndex(indexName);
}
/**
* Returns true if the index name provided belongs to this data stream.
*/
public boolean containsIndex(String indexName) {
return backingIndices.containsIndex(indexName) || failureIndices.containsIndex(indexName);
}
public DataStreamOptions getDataStreamOptions() {
return dataStreamOptions;
}
@@ -782,8 +789,9 @@ public final class DataStream implements SimpleDiffable<DataStream>, ToXContentO
// ensure that no aliases reference index
ensureNoAliasesOnIndex(project, index);
- List<Index> backingIndices = new ArrayList<>(this.backingIndices.indices);
- backingIndices.add(0, index);
+ List<Index> backingIndices = new ArrayList<>(this.backingIndices.indices.size() + 1);
+ backingIndices.add(index);
+ backingIndices.addAll(this.backingIndices.indices);
assert backingIndices.size() == this.backingIndices.indices.size() + 1;
return copy().setBackingIndices(this.backingIndices.copy().setIndices(backingIndices).build())
.setGeneration(generation + 1)
@ -808,8 +816,9 @@ public final class DataStream implements SimpleDiffable<DataStream>, ToXContentO
ensureNoAliasesOnIndex(project, index);
- List<Index> updatedFailureIndices = new ArrayList<>(failureIndices.indices);
- updatedFailureIndices.add(0, index);
+ List<Index> updatedFailureIndices = new ArrayList<>(failureIndices.indices.size() + 1);
+ updatedFailureIndices.add(index);
+ updatedFailureIndices.addAll(failureIndices.indices);
assert updatedFailureIndices.size() == failureIndices.indices.size() + 1;
return copy().setFailureIndices(failureIndices.copy().setIndices(updatedFailureIndices).build())
.setGeneration(generation + 1)
@ -1039,7 +1048,7 @@ public final class DataStream implements SimpleDiffable<DataStream>, ToXContentO
* we return false.
*/
public boolean isIndexManagedByDataStreamLifecycle(Index index, Function<String, IndexMetadata> indexMetadataSupplier) {
- if (backingIndices.containsIndex(index.getName()) == false && failureIndices.containsIndex(index.getName()) == false) {
+ if (containsIndex(index.getName()) == false) {
return false;
}
IndexMetadata indexMetadata = indexMetadataSupplier.apply(index.getName());

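The rollover helpers above stop calling add(0, index) on a copy of the existing list and instead pre-size a new list, add the new write index first, and then append the old indices, which avoids shifting every copied element on insert. A small self-contained comparison of the two patterns, with index names invented for the example:

import java.util.ArrayList;
import java.util.List;

class PrependSketch {
    public static void main(String[] args) {
        List<String> existing = List.of("idx-000001", "idx-000002");

        // Old pattern: copy, then insert at the head, shifting the copied elements by one.
        List<String> shifted = new ArrayList<>(existing);
        shifted.add(0, "idx-000003");

        // New pattern: size the list up front, add the new head, then append the rest.
        List<String> prepended = new ArrayList<>(existing.size() + 1);
        prepended.add("idx-000003");
        prepended.addAll(existing);

        System.out.println(shifted.equals(prepended)); // true: same order, no shifting copy
    }
}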
View file

@@ -14,6 +14,7 @@ import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.Tuple;
+ import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel;
@@ -158,7 +159,26 @@ public class IndexAbstractionResolver {
if (indexAbstraction.isSystem()) {
// check if it is net new
if (resolver.getNetNewSystemIndexPredicate().test(indexAbstraction.getName())) {
- return isSystemIndexVisible(resolver, indexAbstraction);
+ // don't give this code any particular credit for being *correct*. it's just trying to resolve a combination of
// issues in a way that happens to *work*. there's probably a better way of writing things such that this won't
// be necessary, but for the moment, it happens to be expedient to write things this way.
// unwrap the alias and re-run the function on the write index of the alias -- that is, the alias is visible if
// the concrete index that it refers to is visible
Index writeIndex = indexAbstraction.getWriteIndex();
if (writeIndex == null) {
return false;
} else {
return isIndexVisible(
expression,
selectorString,
writeIndex.getName(),
indicesOptions,
projectMetadata,
resolver,
includeDataStreams
);
}
} }
}

View file

@@ -14,18 +14,29 @@ import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
- import org.elasticsearch.cluster.ClusterStateUpdateTask;
+ import org.elasticsearch.cluster.ClusterStateTaskExecutor;
+ import org.elasticsearch.cluster.ClusterStateTaskListener;
import org.elasticsearch.cluster.service.ClusterService;
+ import org.elasticsearch.cluster.service.MasterServiceTaskQueue;
+ import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
- import org.elasticsearch.core.SuppressForbidden;
+ import org.elasticsearch.core.FixForMultiProject;
+ import org.elasticsearch.index.Index;
import org.elasticsearch.indices.SystemIndexMappingUpdateService;
import org.elasticsearch.indices.SystemIndices;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.ArrayList;
+ import java.util.Collection;
+ import java.util.Collections;
+ import java.util.HashSet;
import java.util.List;
import java.util.Map;
+ import java.util.Objects;
+ import java.util.Set;
+ import java.util.stream.Collectors;
+ import java.util.stream.Stream;
/**
* A service responsible for updating the metadata used by system indices.
@@ -38,47 +49,62 @@ public class SystemIndexMetadataUpgradeService implements ClusterStateListener {
private final SystemIndices systemIndices;
private final ClusterService clusterService;
+ private final MasterServiceTaskQueue<SystemIndexMetadataUpgradeTask> taskQueue;
- private volatile boolean updateTaskPending = false;
- private volatile long triggeredVersion = -1L;
public SystemIndexMetadataUpgradeService(SystemIndices systemIndices, ClusterService clusterService) {
this.systemIndices = systemIndices;
this.clusterService = clusterService;
+ this.taskQueue = clusterService.createTaskQueue(
+ "system-indices-metadata-upgrade",
+ Priority.NORMAL,
+ new SystemIndexMetadataUpgradeExecutor()
+ );
}
@Override
public void clusterChanged(ClusterChangedEvent event) {
- if (updateTaskPending == false
- && event.localNodeMaster()
+ @FixForMultiProject
+ ProjectMetadata currentMetadata = event.state().metadata().getProject();
+ ProjectMetadata previousMetadata = event.previousState().metadata().getProject();
+ if (event.localNodeMaster()
&& (event.previousState().nodes().isLocalNodeElectedMaster() == false
- || event.state().metadata().getProject().indices() != event.previousState().metadata().getProject().indices())) {
- final Map<String, IndexMetadata> indexMetadataMap = event.state().metadata().getProject().indices();
- final var previousIndices = event.previousState().metadata().getProject().indices();
- final long triggerV = event.state().version();
- triggeredVersion = triggerV;
+ || currentMetadata.indices() != previousMetadata.indices()
+ || currentMetadata.dataStreams() != previousMetadata.dataStreams())) {
+ final Map<String, IndexMetadata> indexMetadataMap = currentMetadata.indices();
+ final var previousIndices = previousMetadata.indices();
+ Map<String, DataStream> dataStreams = currentMetadata.dataStreams();
+ Map<String, DataStream> previousDataStreams = previousMetadata.dataStreams();
// Fork to the management pool to avoid blocking the cluster applier thread unnecessarily for very large index counts
// TODO: we should have a more efficient way of getting just the changed indices so that we don't have to fork here
clusterService.threadPool().executor(ThreadPool.Names.MANAGEMENT).execute(new AbstractRunnable() {
@Override
protected void doRun() {
- if (triggeredVersion != triggerV) {
- // don't run if another newer check task was triggered already
- return;
- }
- for (Map.Entry<String, IndexMetadata> cursor : indexMetadataMap.entrySet()) {
- if (cursor.getValue() != previousIndices.get(cursor.getKey())) {
- IndexMetadata indexMetadata = cursor.getValue();
- if (requiresUpdate(indexMetadata)) {
- updateTaskPending = true;
- submitUnbatchedTask(
- "system_index_metadata_upgrade_service {system metadata change}",
- new SystemIndexMetadataUpdateTask()
- );
- break;
+ Collection<DataStream> changedDataStreams = new ArrayList<>();
+ Set<Index> dataStreamIndices = new HashSet<>();
+ for (Map.Entry<String, DataStream> cursor : dataStreams.entrySet()) {
+ DataStream dataStream = cursor.getValue();
+ if (dataStream != previousDataStreams.get(cursor.getKey())) {
+ if (requiresUpdate(dataStream)) {
+ changedDataStreams.add(dataStream);
}
}
+ getIndicesBackingDataStream(dataStream).forEach(dataStreamIndices::add);
+ }
+ Collection<Index> changedIndices = new ArrayList<>();
+ for (Map.Entry<String, IndexMetadata> cursor : indexMetadataMap.entrySet()) {
+ IndexMetadata indexMetadata = cursor.getValue();
+ Index index = indexMetadata.getIndex();
+ if (cursor.getValue() != previousIndices.get(cursor.getKey()) && dataStreamIndices.contains(index) == false) {
+ if (requiresUpdate(indexMetadata)) {
+ changedIndices.add(index);
+ }
+ }
+ }
+ if (changedIndices.isEmpty() == false || changedDataStreams.isEmpty() == false) {
+ submitUpdateTask(changedIndices, changedDataStreams);
}
}
@@ -91,6 +117,12 @@ public class SystemIndexMetadataUpgradeService implements ClusterStateListener {
}
}
// visible for testing
void submitUpdateTask(Collection<Index> changedIndices, Collection<DataStream> changedDataStreams) {
SystemIndexMetadataUpgradeTask task = new SystemIndexMetadataUpgradeTask(changedIndices, changedDataStreams);
taskQueue.submitTask("system-index-metadata-upgrade-service", task, null);
}
// package-private for testing
boolean requiresUpdate(IndexMetadata indexMetadata) {
final boolean shouldBeSystem = shouldBeSystem(indexMetadata);
@ -107,6 +139,30 @@ public class SystemIndexMetadataUpgradeService implements ClusterStateListener {
return false;
}
// package-private for testing
boolean requiresUpdate(DataStream dataStream) {
final boolean shouldBeSystem = shouldBeSystem(dataStream);
// should toggle system index status
if (shouldBeSystem != dataStream.isSystem()) {
return true;
}
if (shouldBeSystem) {
return dataStream.isHidden() == false;
}
return false;
}
private boolean shouldBeSystem(DataStream dataStream) {
return systemIndices.isSystemDataStream(dataStream.getName());
}
private static Stream<Index> getIndicesBackingDataStream(DataStream dataStream) {
return Stream.concat(dataStream.getIndices().stream(), dataStream.getFailureIndices().stream());
}
// package-private for testing
static boolean isVisible(IndexMetadata indexMetadata) {
return indexMetadata.getSettings().getAsBoolean(IndexMetadata.SETTING_INDEX_HIDDEN, false) == false;
@ -114,8 +170,7 @@ public class SystemIndexMetadataUpgradeService implements ClusterStateListener {
// package-private for testing
boolean shouldBeSystem(IndexMetadata indexMetadata) {
- return systemIndices.isSystemIndex(indexMetadata.getIndex())
- || systemIndices.isSystemIndexBackingDataStream(indexMetadata.getIndex().getName());
+ return systemIndices.isSystemIndex(indexMetadata.getIndex());
}
// package-private for testing
@ -123,73 +178,149 @@ public class SystemIndexMetadataUpgradeService implements ClusterStateListener {
return indexMetadata.getAliases().values().stream().anyMatch(a -> Boolean.FALSE.equals(a.isHidden()));
}
- @SuppressForbidden(reason = "legacy usage of unbatched task") // TODO add support for batching here
- private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String source, ClusterStateUpdateTask task) {
- clusterService.submitUnbatchedStateUpdateTask(source, task);
+ private record SystemIndexMetadataUpgradeTask(Collection<Index> changedIndices, Collection<DataStream> changedDataStreams)
+ implements
+ ClusterStateTaskListener {
}
// visible for testing
SystemIndexMetadataUpdateTask getTask() {
return new SystemIndexMetadataUpdateTask();
}
public class SystemIndexMetadataUpdateTask extends ClusterStateUpdateTask {
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
final Map<String, IndexMetadata> indexMetadataMap = currentState.metadata().getProject().indices();
final List<IndexMetadata> updatedMetadata = new ArrayList<>();
for (Map.Entry<String, IndexMetadata> entry : indexMetadataMap.entrySet()) {
final IndexMetadata indexMetadata = entry.getValue();
final boolean shouldBeSystem = shouldBeSystem(indexMetadata);
IndexMetadata.Builder builder = IndexMetadata.builder(indexMetadata);
boolean updated = false;
if (shouldBeSystem != indexMetadata.isSystem()) {
builder.system(indexMetadata.isSystem() == false);
updated = true;
}
if (shouldBeSystem && isVisible(indexMetadata)) {
builder.settings(Settings.builder().put(indexMetadata.getSettings()).put(IndexMetadata.SETTING_INDEX_HIDDEN, true));
builder.settingsVersion(builder.settingsVersion() + 1);
updated = true;
}
if (shouldBeSystem && hasVisibleAlias(indexMetadata)) {
for (AliasMetadata aliasMetadata : indexMetadata.getAliases().values()) {
if (Boolean.FALSE.equals(aliasMetadata.isHidden())) {
builder.removeAlias(aliasMetadata.alias());
builder.putAlias(
AliasMetadata.builder(aliasMetadata.alias())
.filter(aliasMetadata.filter())
.indexRouting(aliasMetadata.indexRouting())
.isHidden(true)
.searchRouting(aliasMetadata.searchRouting())
.writeIndex(aliasMetadata.writeIndex())
);
}
}
}
if (updated) {
updatedMetadata.add(builder.build());
}
}
if (updatedMetadata.isEmpty() == false) {
final Metadata.Builder builder = Metadata.builder(currentState.metadata());
updatedMetadata.forEach(idxMeta -> builder.put(idxMeta, true));
return ClusterState.builder(currentState).metadata(builder).build();
}
return currentState;
}
@Override
public void onFailure(Exception e) {
- updateTaskPending = false;
- logger.error("failed to update system index metadata", e);
+ logger.error("System index metadata upgrade failed", e);
}
@Override
- public void clusterStateProcessed(ClusterState oldState, ClusterState newState) {
- updateTaskPending = false;
+ public String toString() {
+ return "SystemIndexMetadataUpgradeTask[changedIndices="
+ changedIndices.stream().map(Index::getName).collect(Collectors.joining(","))
+ ";changedDataStreams="
+ changedDataStreams.stream().map(DataStream::getName).collect(Collectors.joining(","))
+ "]";
}
}
private class SystemIndexMetadataUpgradeExecutor implements ClusterStateTaskExecutor<SystemIndexMetadataUpgradeTask> {
@Override
public ClusterState execute(BatchExecutionContext<SystemIndexMetadataUpgradeTask> batchExecutionContext) {
ClusterState initialState = batchExecutionContext.initialState();
List<? extends TaskContext<SystemIndexMetadataUpgradeTask>> taskContexts = batchExecutionContext.taskContexts();
List<Index> indices = taskContexts.stream()
.map(TaskContext::getTask)
.map(SystemIndexMetadataUpgradeTask::changedIndices)
.flatMap(Collection::stream)
.toList();
List<IndexMetadata> updatedMetadata = updateIndices(initialState, indices);
List<DataStream> dataStreams = taskContexts.stream()
.map(TaskContext::getTask)
.map(SystemIndexMetadataUpgradeTask::changedDataStreams)
.flatMap(Collection::stream)
.toList();
List<DataStream> updatedDataStreams = updateDataStreams(dataStreams);
List<IndexMetadata> updatedBackingIndices = updateIndicesBackingDataStreams(initialState, updatedDataStreams);
for (TaskContext<SystemIndexMetadataUpgradeTask> taskContext : taskContexts) {
taskContext.success(() -> {});
}
if (updatedMetadata.isEmpty() == false || updatedDataStreams.isEmpty() == false) {
Metadata.Builder builder = Metadata.builder(initialState.metadata());
updatedMetadata.forEach(idxMeta -> builder.put(idxMeta, true));
updatedDataStreams.forEach(builder::put);
updatedBackingIndices.forEach(idxMeta -> builder.put(idxMeta, true));
return ClusterState.builder(initialState).metadata(builder).build();
}
return initialState;
}
private List<IndexMetadata> updateIndices(ClusterState currentState, List<Index> indices) {
if (indices.isEmpty()) {
return Collections.emptyList();
}
Metadata metadata = currentState.metadata();
final List<IndexMetadata> updatedMetadata = new ArrayList<>();
for (Index index : indices) {
IndexMetadata indexMetadata = metadata.getProject().index(index);
final boolean shouldBeSystem = shouldBeSystem(indexMetadata);
IndexMetadata updatedIndexMetadata = updateIndexIfNecessary(indexMetadata, shouldBeSystem);
if (updatedIndexMetadata != null) {
updatedMetadata.add(updatedIndexMetadata);
}
}
return updatedMetadata;
}
private IndexMetadata updateIndexIfNecessary(IndexMetadata indexMetadata, boolean shouldBeSystem) {
IndexMetadata.Builder builder = IndexMetadata.builder(indexMetadata);
boolean updated = false;
if (shouldBeSystem != indexMetadata.isSystem()) {
builder.system(indexMetadata.isSystem() == false);
updated = true;
}
if (shouldBeSystem && isVisible(indexMetadata)) {
builder.settings(Settings.builder().put(indexMetadata.getSettings()).put(IndexMetadata.SETTING_INDEX_HIDDEN, true));
builder.settingsVersion(builder.settingsVersion() + 1);
updated = true;
}
if (shouldBeSystem && hasVisibleAlias(indexMetadata)) {
for (AliasMetadata aliasMetadata : indexMetadata.getAliases().values()) {
if (Boolean.FALSE.equals(aliasMetadata.isHidden())) {
builder.removeAlias(aliasMetadata.alias());
builder.putAlias(
AliasMetadata.builder(aliasMetadata.alias())
.filter(aliasMetadata.filter())
.indexRouting(aliasMetadata.indexRouting())
.isHidden(true)
.searchRouting(aliasMetadata.searchRouting())
.writeIndex(aliasMetadata.writeIndex())
);
updated = true;
}
}
}
return updated ? builder.build() : null;
}
private List<DataStream> updateDataStreams(List<DataStream> dataStreams) {
if (dataStreams.isEmpty()) {
return Collections.emptyList();
}
List<DataStream> updatedDataStreams = new ArrayList<>();
for (DataStream dataStream : dataStreams) {
boolean shouldBeSystem = shouldBeSystem(dataStream);
if (dataStream.isSystem() != shouldBeSystem) {
DataStream.Builder dataStreamBuilder = dataStream.copy().setSystem(shouldBeSystem);
if (shouldBeSystem) {
dataStreamBuilder.setHidden(true);
}
updatedDataStreams.add(dataStreamBuilder.build());
}
}
return updatedDataStreams;
}
private List<IndexMetadata> updateIndicesBackingDataStreams(ClusterState currentState, List<DataStream> updatedDataStreams) {
if (updatedDataStreams.isEmpty()) {
return Collections.emptyList();
}
Metadata metadata = currentState.metadata();
final List<IndexMetadata> updatedMetadata = new ArrayList<>();
for (DataStream updatedDataStream : updatedDataStreams) {
boolean shouldBeSystem = updatedDataStream.isSystem();
List<IndexMetadata> updatedIndicesMetadata = getIndicesBackingDataStreamMetadata(metadata.getProject(), updatedDataStream)
.map(idx -> updateIndexIfNecessary(idx, shouldBeSystem))
.filter(Objects::nonNull)
.toList();
updatedMetadata.addAll(updatedIndicesMetadata);
}
return updatedMetadata;
}
private Stream<IndexMetadata> getIndicesBackingDataStreamMetadata(ProjectMetadata projectMetadata, DataStream dataStream) {
return getIndicesBackingDataStream(dataStream).map(projectMetadata::index);
} }
} }
} }
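The service now goes through a master-service task queue instead of unbatched cluster state update tasks: each task only records which indices and data streams changed, and one executor run folds every queued task into a single new cluster state. A simplified, self-contained sketch of that batching shape, with plain collections standing in for cluster state (all types here are illustrative):

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Sketch only: tasks carry the names that changed; the executor merges a whole
// batch and produces one new "state", mirroring the executor introduced above.
class BatchedUpgradeSketch {
    record UpgradeTask(List<String> changedIndices, List<String> changedDataStreams) {}

    record State(Set<String> systemIndices, Set<String> systemDataStreams) {}

    static State executeBatch(State initial, List<UpgradeTask> batch) {
        Set<String> indices = new LinkedHashSet<>(initial.systemIndices());
        Set<String> dataStreams = new LinkedHashSet<>(initial.systemDataStreams());
        for (UpgradeTask task : batch) {
            indices.addAll(task.changedIndices());         // mark indices as system
            dataStreams.addAll(task.changedDataStreams());  // mark data streams as system (and hidden)
        }
        return new State(indices, dataStreams);             // one state change for the whole batch
    }

    public static void main(String[] args) {
        State initial = new State(Set.of(), Set.of());
        List<UpgradeTask> batch = List.of(
            new UpgradeTask(List.of(".old-system-index"), List.of()),
            new UpgradeTask(List.of(), List.of(".my-ds"))
        );
        System.out.println(executeBatch(initial, batch));
    }
}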

View file

@@ -25,7 +25,9 @@ import java.math.RoundingMode;
import java.util.Locale; import java.util.Locale;
import java.util.Objects; import java.util.Objects;
import static org.elasticsearch.TransportVersions.BYTE_SIZE_VALUE_ALWAYS_USES_BYTES;
import static org.elasticsearch.TransportVersions.BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1; import static org.elasticsearch.TransportVersions.BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1;
import static org.elasticsearch.TransportVersions.BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_90;
import static org.elasticsearch.TransportVersions.REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1; import static org.elasticsearch.TransportVersions.REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1;
import static org.elasticsearch.common.unit.ByteSizeUnit.BYTES; import static org.elasticsearch.common.unit.ByteSizeUnit.BYTES;
import static org.elasticsearch.common.unit.ByteSizeUnit.GB; import static org.elasticsearch.common.unit.ByteSizeUnit.GB;
@@ -113,8 +115,7 @@ public class ByteSizeValue implements Writeable, Comparable<ByteSizeValue>, ToXC
public static ByteSizeValue readFrom(StreamInput in) throws IOException { public static ByteSizeValue readFrom(StreamInput in) throws IOException {
long size = in.readZLong(); long size = in.readZLong();
ByteSizeUnit unit = ByteSizeUnit.readFrom(in); ByteSizeUnit unit = ByteSizeUnit.readFrom(in);
TransportVersion tv = in.getTransportVersion(); if (alwaysUseBytes(in.getTransportVersion())) {
if (tv.onOrAfter(BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1) && tv.before(REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1)) {
return newByteSizeValue(size, unit); return newByteSizeValue(size, unit);
} else { } else {
return of(size, unit); return of(size, unit);
@@ -123,8 +124,7 @@ public class ByteSizeValue implements Writeable, Comparable<ByteSizeValue>, ToXC
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
TransportVersion tv = out.getTransportVersion(); if (alwaysUseBytes(out.getTransportVersion())) {
if (tv.onOrAfter(BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1) && tv.before(REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1)) {
out.writeZLong(sizeInBytes); out.writeZLong(sizeInBytes);
} else { } else {
out.writeZLong(Math.divideExact(sizeInBytes, desiredUnit.toBytes(1))); out.writeZLong(Math.divideExact(sizeInBytes, desiredUnit.toBytes(1)));
@@ -132,6 +132,12 @@ public class ByteSizeValue implements Writeable, Comparable<ByteSizeValue>, ToXC
desiredUnit.writeTo(out); desiredUnit.writeTo(out);
} }
private static boolean alwaysUseBytes(TransportVersion tv) {
return tv.onOrAfter(BYTE_SIZE_VALUE_ALWAYS_USES_BYTES)
|| tv.isPatchFrom(BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_90)
|| tv.between(BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1, REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1);
}
ByteSizeValue(long sizeInBytes, ByteSizeUnit desiredUnit) { ByteSizeValue(long sizeInBytes, ByteSizeUnit desiredUnit) {
this.sizeInBytes = sizeInBytes; this.sizeInBytes = sizeInBytes;
this.desiredUnit = desiredUnit; this.desiredUnit = desiredUnit;
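The new alwaysUseBytes helper gathers the transport-version checks in one place: peers on newer versions (and on the specific patch releases that carry the fix) receive the size as a raw byte count, while older peers keep the unit-denominated encoding. A standalone sketch of this kind of version-gated wire format, with an invented numeric cut-over standing in for the real TransportVersion constants:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Sketch only: pick the encoding from the peer's (hypothetical) version number.
class VersionGatedEncodingSketch {
    static final int ALWAYS_BYTES_VERSION = 9_100_000; // assumed cut-over, not a real constant

    static boolean alwaysUseBytes(int peerVersion) {
        return peerVersion >= ALWAYS_BYTES_VERSION;
    }

    static byte[] encodeSize(long sizeInBytes, long unitBytes, int peerVersion) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            if (alwaysUseBytes(peerVersion)) {
                out.writeLong(sizeInBytes);             // new format: always raw bytes
            } else {
                out.writeLong(sizeInBytes / unitBytes); // old format: count of desired units
            }
        }
        return bytes.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        long tenMegs = 10L * 1024 * 1024;
        System.out.println(alwaysUseBytes(8_160_000)); // false -> old peers get "10" (megabytes)
        System.out.println(alwaysUseBytes(9_200_000)); // true  -> new peers get the raw byte count
        System.out.println(encodeSize(tenMegs, 1024 * 1024, 9_200_000).length); // 8 bytes on the wire
    }
}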

View file

@@ -52,10 +52,10 @@ import static org.elasticsearch.health.HealthStatus.GREEN;
import static org.elasticsearch.health.HealthStatus.RED; import static org.elasticsearch.health.HealthStatus.RED;
/** /**
* This class periodically logs the results of the Health API to the standard Elasticsearch server log file. It a lifecycle * This class periodically logs the results of the Health API to the standard Elasticsearch server log file. It is a lifecycle
* aware component because it health depends on other lifecycle aware components. This means: * aware component because it depends on other lifecycle aware components. This means:
* - We do not schedule any jobs until the lifecycle state is STARTED * - We do not schedule any jobs until the lifecycle state is STARTED
* - When the lifecycle state becomes STOPPED, do not schedule any more runs, but we do let the current one finish * - When the lifecycle state becomes STOPPED, we do not schedule any more runs, but we do let the current one finish
* - When the lifecycle state becomes CLOSED, we will interrupt the current run as well. * - When the lifecycle state becomes CLOSED, we will interrupt the current run as well.
*/ */
public class HealthPeriodicLogger extends AbstractLifecycleComponent implements ClusterStateListener, SchedulerEngine.Listener { public class HealthPeriodicLogger extends AbstractLifecycleComponent implements ClusterStateListener, SchedulerEngine.Listener {
@@ -361,11 +361,24 @@ public class HealthPeriodicLogger extends AbstractLifecycleComponent implements
String.format(Locale.ROOT, "%s.%s.status", HEALTH_FIELD_PREFIX, indicatorResult.name()), String.format(Locale.ROOT, "%s.%s.status", HEALTH_FIELD_PREFIX, indicatorResult.name()),
indicatorResult.status().xContentValue() indicatorResult.status().xContentValue()
); );
if (GREEN.equals(indicatorResult.status()) == false && indicatorResult.details() != null) { if (GREEN.equals(indicatorResult.status()) == false) {
result.put( // indicator details
String.format(Locale.ROOT, "%s.%s.details", HEALTH_FIELD_PREFIX, indicatorResult.name()), if (indicatorResult.details() != null) {
Strings.toString(indicatorResult.details()) result.put(
); String.format(Locale.ROOT, "%s.%s.details", HEALTH_FIELD_PREFIX, indicatorResult.name()),
Strings.toString(indicatorResult.details())
);
}
// indicator impact
if (indicatorResult.impacts() != null) {
indicatorResult.impacts()
.forEach(
impact -> result.put(
String.format(Locale.ROOT, "%s.%s.%s.impacted", HEALTH_FIELD_PREFIX, indicatorResult.name(), impact.id()),
true
)
);
}
} }
}); });
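With the hunk above, every non-green indicator now also emits one boolean field per impact next to its status and details. A rough sketch of the resulting flat key layout, assuming the prefix resolves to "elasticsearch.health" (the code uses the HEALTH_FIELD_PREFIX constant) and an example impact id:

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

// Sketch of the flattened fields logged for one non-green indicator.
class HealthLogFieldsSketch {
    public static void main(String[] args) {
        String prefix = "elasticsearch.health"; // assumed value of HEALTH_FIELD_PREFIX
        String indicator = "disk";
        Map<String, Object> fields = new LinkedHashMap<>();
        fields.put(String.format(Locale.ROOT, "%s.%s.status", prefix, indicator), "yellow");
        fields.put(String.format(Locale.ROOT, "%s.%s.details", prefix, indicator), "{\"indices_with_readonly_block\":1}");
        for (String impactId : List.of("ingest_capability_unavailable")) {
            // new in this change: one boolean per impact, keyed by indicator and impact id
            fields.put(String.format(Locale.ROOT, "%s.%s.%s.impacted", prefix, indicator, impactId), true);
        }
        fields.forEach((key, value) -> System.out.println(key + " = " + value));
    }
}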

View file

@@ -67,7 +67,8 @@ public class DiskHealthIndicatorService implements HealthIndicatorService {
private static final Logger logger = LogManager.getLogger(DiskHealthIndicatorService.class); private static final Logger logger = LogManager.getLogger(DiskHealthIndicatorService.class);
private static final String IMPACT_INGEST_UNAVAILABLE_ID = "ingest_capability_unavailable"; // VisibleForTesting
public static final String IMPACT_INGEST_UNAVAILABLE_ID = "ingest_capability_unavailable";
private static final String IMPACT_INGEST_AT_RISK_ID = "ingest_capability_at_risk"; private static final String IMPACT_INGEST_AT_RISK_ID = "ingest_capability_at_risk";
private static final String IMPACT_CLUSTER_STABILITY_AT_RISK_ID = "cluster_stability_at_risk"; private static final String IMPACT_CLUSTER_STABILITY_AT_RISK_ID = "cluster_stability_at_risk";
private static final String IMPACT_CLUSTER_FUNCTIONALITY_UNAVAILABLE_ID = "cluster_functionality_unavailable"; private static final String IMPACT_CLUSTER_FUNCTIONALITY_UNAVAILABLE_ID = "cluster_functionality_unavailable";

View file

@@ -9,17 +9,17 @@
package org.elasticsearch.indices; package org.elasticsearch.indices;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComponentTemplate;
import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.index.Index;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Stream;
import static org.elasticsearch.indices.AssociatedIndexDescriptor.buildAutomaton;
/** /**
* Describes a {@link DataStream} that is reserved for use by a system feature. * Describes a {@link DataStream} that is reserved for use by a system feature.
@@ -53,7 +53,6 @@ public class SystemDataStreamDescriptor {
private final Map<String, ComponentTemplate> componentTemplates; private final Map<String, ComponentTemplate> componentTemplates;
private final List<String> allowedElasticProductOrigins; private final List<String> allowedElasticProductOrigins;
private final ExecutorNames executorNames; private final ExecutorNames executorNames;
private final CharacterRunAutomaton characterRunAutomaton;
/** /**
* Creates a new descriptor for a system data descriptor * Creates a new descriptor for a system data descriptor
@@ -96,8 +95,6 @@ public class SystemDataStreamDescriptor {
throw new IllegalArgumentException("External system data stream without allowed products is not a valid combination"); throw new IllegalArgumentException("External system data stream without allowed products is not a valid combination");
} }
this.executorNames = Objects.nonNull(executorNames) ? executorNames : ExecutorNames.DEFAULT_SYSTEM_DATA_STREAM_THREAD_POOLS; this.executorNames = Objects.nonNull(executorNames) ? executorNames : ExecutorNames.DEFAULT_SYSTEM_DATA_STREAM_THREAD_POOLS;
this.characterRunAutomaton = new CharacterRunAutomaton(buildAutomaton(backingIndexPatternForDataStream(this.dataStreamName)));
} }
public String getDataStreamName() { public String getDataStreamName() {
@@ -110,7 +107,11 @@
* @return List of names of backing indices * @return List of names of backing indices
*/ */
public List<String> getBackingIndexNames(Metadata metadata) { public List<String> getBackingIndexNames(Metadata metadata) {
return metadata.getProject().indices().keySet().stream().filter(this.characterRunAutomaton::run).toList(); DataStream dataStream = metadata.getProject().dataStreams().get(dataStreamName);
if (dataStream == null) {
return Collections.emptyList();
}
return Stream.concat(dataStream.getIndices().stream(), dataStream.getFailureIndices().stream()).map(Index::getName).toList();
} }
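getBackingIndexNames no longer matches every index name against a precomputed automaton; it looks the data stream up by name and concatenates its backing and failure-store indices. A stripped-down sketch of that lookup with plain collections (the record here is a stand-in, not the real DataStream API):

import java.util.List;
import java.util.Map;
import java.util.stream.Stream;

// Sketch: ask the data stream for its indices instead of pattern-matching all index names.
class BackingIndexLookupSketch {
    record DataStream(List<String> indices, List<String> failureIndices) {}

    static List<String> backingIndexNames(Map<String, DataStream> dataStreams, String name) {
        DataStream dataStream = dataStreams.get(name);
        if (dataStream == null) {
            return List.of(); // data stream not created yet: nothing to report
        }
        return Stream.concat(dataStream.indices().stream(), dataStream.failureIndices().stream()).toList();
    }

    public static void main(String[] args) {
        Map<String, DataStream> metadata = Map.of(
            ".my-ds",
            new DataStream(List.of(".ds-.my-ds-000001"), List.of(".fs-.my-ds-000001"))
        );
        System.out.println(backingIndexNames(metadata, ".my-ds")); // backing plus failure indices
        System.out.println(backingIndexNames(metadata, ".other")); // []
    }
}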
public String getDescription() { public String getDescription() {

View file

@@ -22,6 +22,7 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.AbstractSearchTestCase; import org.elasticsearch.search.AbstractSearchTestCase;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.PointInTimeBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.collapse.CollapseBuilder;
@@ -173,22 +174,7 @@ public class ExpandSearchPhaseTests extends ESTestCase {
@Override @Override
void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener<MultiSearchResponse> listener) { void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener<MultiSearchResponse> listener) {
assertTrue(executedMultiSearch.compareAndSet(false, true)); assertTrue(executedMultiSearch.compareAndSet(false, true));
SearchResponse searchResponse = new SearchResponse( SearchResponse searchResponse = SearchResponseUtils.successfulResponse(collapsedHits);
collapsedHits,
null,
null,
false,
null,
null,
1,
null,
1,
1,
0,
0,
ShardSearchFailure.EMPTY_ARRAY,
SearchResponse.Clusters.EMPTY
);
ActionListener.respondAndRelease( ActionListener.respondAndRelease(
listener, listener,
new MultiSearchResponse( new MultiSearchResponse(

View file

@@ -30,6 +30,7 @@ import java.util.concurrent.TimeUnit;
import java.util.function.Supplier; import java.util.function.Supplier;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
import static org.elasticsearch.indices.SystemIndices.EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY;
import static org.elasticsearch.indices.SystemIndices.SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY; import static org.elasticsearch.indices.SystemIndices.SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.contains;
@@ -225,18 +226,6 @@ public class IndexAbstractionResolverTests extends ESTestCase {
assertThat(isIndexVisible("data-stream1", "failures"), is(true)); assertThat(isIndexVisible("data-stream1", "failures"), is(true));
} }
private boolean isIndexVisible(String index, String selector) {
return IndexAbstractionResolver.isIndexVisible(
"*",
selector,
index,
IndicesOptions.strictExpandHidden(),
projectMetadata,
indexNameExpressionResolver,
true
);
}
public void testIsNetNewSystemIndexVisible() { public void testIsNetNewSystemIndexVisible() {
final Settings settings = Settings.builder() final Settings settings = Settings.builder()
.put("index.number_of_replicas", 0) .put("index.number_of_replicas", 0)
@@ -276,20 +265,83 @@
List.of(new SystemIndices.Feature("name", "description", List.of(fooDescriptor, barDescriptor))) List.of(new SystemIndices.Feature("name", "description", List.of(fooDescriptor, barDescriptor)))
); );
final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); projectMetadata = ProjectMetadata.builder(projectMetadata.id()).put(foo, true).put(barReindexed, true).put(other, true).build();
threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "false");
indexNameExpressionResolver = new IndexNameExpressionResolver(
threadContext,
systemIndices,
TestProjectResolvers.DEFAULT_PROJECT_ONLY
);
indexAbstractionResolver = new IndexAbstractionResolver(indexNameExpressionResolver);
projectMetadata = Metadata.builder().put(foo, true).put(barReindexed, true).put(other, true).build().getProject(); // these indices options are for the GET _data_streams case
final IndicesOptions noHiddenNoAliases = IndicesOptions.builder()
.wildcardOptions(
IndicesOptions.WildcardOptions.builder()
.matchOpen(true)
.matchClosed(true)
.includeHidden(false)
.resolveAliases(false)
.build()
)
.build();
assertThat(isIndexVisible("other", "*"), is(true)); {
assertThat(isIndexVisible(".foo", "*"), is(false)); final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
assertThat(isIndexVisible(".bar", "*"), is(false)); threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "true");
indexNameExpressionResolver = new IndexNameExpressionResolver(
threadContext,
systemIndices,
TestProjectResolvers.singleProject(projectMetadata.id())
);
indexAbstractionResolver = new IndexAbstractionResolver(indexNameExpressionResolver);
// this covers the GET * case -- with system access, you can see everything
assertThat(isIndexVisible("other", "*"), is(true));
assertThat(isIndexVisible(".foo", "*"), is(true));
assertThat(isIndexVisible(".bar", "*"), is(true));
// but if you don't ask for hidden and aliases, you won't see hidden indices or aliases, naturally
assertThat(isIndexVisible("other", "*", noHiddenNoAliases), is(true));
assertThat(isIndexVisible(".foo", "*", noHiddenNoAliases), is(false));
assertThat(isIndexVisible(".bar", "*", noHiddenNoAliases), is(false));
}
{
final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "false");
indexNameExpressionResolver = new IndexNameExpressionResolver(
threadContext,
systemIndices,
TestProjectResolvers.DEFAULT_PROJECT_ONLY
);
indexAbstractionResolver = new IndexAbstractionResolver(indexNameExpressionResolver);
// this covers the GET * case -- without system access, you can't see everything
assertThat(isIndexVisible("other", "*"), is(true));
assertThat(isIndexVisible(".foo", "*"), is(false));
assertThat(isIndexVisible(".bar", "*"), is(false));
// no difference here in the datastream case, you can't see these then, either
assertThat(isIndexVisible("other", "*", noHiddenNoAliases), is(true));
assertThat(isIndexVisible(".foo", "*", noHiddenNoAliases), is(false));
assertThat(isIndexVisible(".bar", "*", noHiddenNoAliases), is(false));
}
{
final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "true");
threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "some-elastic-product");
indexNameExpressionResolver = new IndexNameExpressionResolver(
threadContext,
systemIndices,
TestProjectResolvers.singleProject(projectMetadata.id())
);
indexAbstractionResolver = new IndexAbstractionResolver(indexNameExpressionResolver);
// this covers the GET * case -- with product (only) access, you can't see everything
assertThat(isIndexVisible("other", "*"), is(true));
assertThat(isIndexVisible(".foo", "*"), is(false));
assertThat(isIndexVisible(".bar", "*"), is(false));
// no difference here in the datastream case, you can't see these then, either
assertThat(isIndexVisible("other", "*", noHiddenNoAliases), is(true));
assertThat(isIndexVisible(".foo", "*", noHiddenNoAliases), is(false));
assertThat(isIndexVisible(".bar", "*", noHiddenNoAliases), is(false));
}
} }
private static XContentBuilder mappings() { private static XContentBuilder mappings() {
@@ -317,4 +369,21 @@
private List<String> resolveAbstractions(List<String> expressions, IndicesOptions indicesOptions, Supplier<Set<String>> mask) { private List<String> resolveAbstractions(List<String> expressions, IndicesOptions indicesOptions, Supplier<Set<String>> mask) {
return indexAbstractionResolver.resolveIndexAbstractions(expressions, indicesOptions, projectMetadata, mask, (idx) -> true, true); return indexAbstractionResolver.resolveIndexAbstractions(expressions, indicesOptions, projectMetadata, mask, (idx) -> true, true);
} }
private boolean isIndexVisible(String index, String selector) {
return isIndexVisible(index, selector, IndicesOptions.strictExpandHidden());
}
private boolean isIndexVisible(String index, String selector, IndicesOptions indicesOptions) {
return IndexAbstractionResolver.isIndexVisible(
"*",
selector,
index,
indicesOptions,
projectMetadata,
indexNameExpressionResolver,
true
);
}
} }
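The new assertions above exercise three access modes: full system access (access header true, no product origin), no system access, and product-only access. A much-simplified sketch of that decision, with the header keys written out literally as assumptions standing in for the real ThreadContext constants:

import java.util.Map;

// Sketch only: a toy visibility rule for dot-prefixed "system" names based on request headers.
class SystemIndexVisibilitySketch {
    static boolean isVisible(String indexName, Map<String, String> headers) {
        if (indexName.startsWith(".") == false) {
            return true; // regular indices are always visible
        }
        boolean systemAccess = "true".equals(headers.get("_system_index_access_allowed"));
        boolean productAccess = headers.containsKey("_external_system_index_access_origin");
        // product-only access does not expose every system index; unrestricted system access does
        return systemAccess && productAccess == false;
    }

    public static void main(String[] args) {
        System.out.println(isVisible("other", Map.of()));                                      // true
        System.out.println(isVisible(".foo", Map.of("_system_index_access_allowed", "true"))); // true
        System.out.println(isVisible(".foo", Map.of()));                                       // false
    }
}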

View file

@@ -11,20 +11,32 @@ package org.elasticsearch.cluster.metadata;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateTaskExecutor;
import org.elasticsearch.cluster.ClusterStateTaskListener;
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils;
import org.elasticsearch.cluster.service.MasterServiceTaskQueue;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.indices.ExecutorNames;
import org.elasticsearch.indices.SystemDataStreamDescriptor;
import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.indices.SystemIndexDescriptor;
import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.indices.SystemIndices;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.junit.Before; import org.junit.Before;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.Map; import java.util.Map;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class SystemIndexMetadataUpgradeServiceTests extends ESTestCase { public class SystemIndexMetadataUpgradeServiceTests extends ESTestCase {
@@ -49,17 +61,62 @@
.setOrigin("FAKE_ORIGIN") .setOrigin("FAKE_ORIGIN")
.build(); .build();
private SystemIndexMetadataUpgradeService service; private static final String SYSTEM_DATA_STREAM_NAME = ".my-ds";
private static final String SYSTEM_DATA_STREAM_INDEX_NAME = DataStream.BACKING_INDEX_PREFIX + SYSTEM_DATA_STREAM_NAME + "-1";
private static final String SYSTEM_DATA_STREAM_FAILSTORE_NAME = DataStream.FAILURE_STORE_PREFIX + SYSTEM_DATA_STREAM_NAME;
private static final SystemDataStreamDescriptor SYSTEM_DATA_STREAM_DESCRIPTOR = new SystemDataStreamDescriptor(
SYSTEM_DATA_STREAM_NAME,
"System datastream for test",
SystemDataStreamDescriptor.Type.INTERNAL,
ComposableIndexTemplate.builder().build(),
Collections.emptyMap(),
Collections.singletonList("FAKE_ORIGIN"),
ExecutorNames.DEFAULT_SYSTEM_DATA_STREAM_THREAD_POOLS
);
private SystemIndexMetadataUpgradeService service;
private ClusterStateTaskListener task;
private ClusterStateTaskExecutor<ClusterStateTaskListener> executor;
@SuppressWarnings("unchecked")
@Before @Before
public void setUpTest() { public void setUpTest() {
// set up a system index upgrade service // set up a system index upgrade service
ClusterService clusterService = mock(ClusterService.class);
MasterServiceTaskQueue<ClusterStateTaskListener> queue = mock(MasterServiceTaskQueue.class);
when(clusterService.createTaskQueue(eq("system-indices-metadata-upgrade"), eq(Priority.NORMAL), any())).thenAnswer(invocation -> {
executor = invocation.getArgument(2, ClusterStateTaskExecutor.class);
return queue;
});
doAnswer(invocation -> {
task = invocation.getArgument(1, ClusterStateTaskListener.class);
return null;
}).when(queue).submitTask(any(), any(), any());
this.service = new SystemIndexMetadataUpgradeService( this.service = new SystemIndexMetadataUpgradeService(
new SystemIndices(List.of(new SystemIndices.Feature("foo", "a test feature", List.of(DESCRIPTOR)))), new SystemIndices(
mock(ClusterService.class) List.of(
new SystemIndices.Feature("foo", "a test feature", List.of(DESCRIPTOR)),
new SystemIndices.Feature(
"sds",
"system data stream feature",
Collections.emptyList(),
Collections.singletonList(SYSTEM_DATA_STREAM_DESCRIPTOR)
)
)
),
clusterService
); );
} }
private ClusterState executeTask(ClusterState clusterState) {
try {
return ClusterStateTaskExecutorUtils.executeAndAssertSuccessful(clusterState, executor, Collections.singletonList(task));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/** /**
* When we upgrade Elasticsearch versions, existing indices may be newly * When we upgrade Elasticsearch versions, existing indices may be newly
* defined as system indices. If such indices are set without "hidden," we need * defined as system indices. If such indices are set without "hidden," we need
@@ -75,6 +132,54 @@ public class SystemIndexMetadataUpgradeServiceTests extends ESTestCase {
assertSystemUpgradeAppliesHiddenSetting(hiddenIndexMetadata); assertSystemUpgradeAppliesHiddenSetting(hiddenIndexMetadata);
} }
public void testUpgradeDataStreamToSystemDataStream() {
IndexMetadata dsIndexMetadata = IndexMetadata.builder(SYSTEM_DATA_STREAM_INDEX_NAME)
.system(false)
.settings(getSettingsBuilder().put(IndexMetadata.SETTING_INDEX_HIDDEN, true))
.build();
IndexMetadata fsIndexMetadata = IndexMetadata.builder(SYSTEM_DATA_STREAM_FAILSTORE_NAME)
.system(false)
.settings(getSettingsBuilder().put(IndexMetadata.SETTING_INDEX_HIDDEN, true))
.build();
DataStream.DataStreamIndices failureIndices = DataStream.DataStreamIndices.failureIndicesBuilder(
Collections.singletonList(fsIndexMetadata.getIndex())
).build();
DataStream dataStream = DataStream.builder(SYSTEM_DATA_STREAM_NAME, Collections.singletonList(dsIndexMetadata.getIndex()))
.setFailureIndices(failureIndices)
.setHidden(false)
.setSystem(false)
.build();
assertTrue(dataStream.containsIndex(dsIndexMetadata.getIndex().getName()));
assertTrue(dataStream.containsIndex(fsIndexMetadata.getIndex().getName()));
Metadata.Builder clusterMetadata = new Metadata.Builder();
clusterMetadata.put(dataStream);
clusterMetadata.put(dsIndexMetadata, true);
clusterMetadata.put(fsIndexMetadata, true);
ClusterState clusterState = ClusterState.builder(new ClusterName("system-index-metadata-upgrade-service-tests"))
.metadata(clusterMetadata.build())
.customs(Map.of())
.build();
service.submitUpdateTask(Collections.emptyList(), Collections.singletonList(dataStream));
// Execute a metadata upgrade task on the initial cluster state
ClusterState newState = executeTask(clusterState);
DataStream updatedDataStream = newState.metadata().getProject().dataStreams().get(dataStream.getName());
assertThat(updatedDataStream.isSystem(), equalTo(true));
assertThat(updatedDataStream.isHidden(), equalTo(true));
IndexMetadata updatedIndexMetadata = newState.metadata().getProject().index(dsIndexMetadata.getIndex().getName());
assertThat(updatedIndexMetadata.isSystem(), equalTo(true));
assertThat(updatedIndexMetadata.isHidden(), equalTo(true));
IndexMetadata updatedFailstoreMetadata = newState.metadata().getProject().index(fsIndexMetadata.getIndex().getName());
assertThat(updatedFailstoreMetadata.isSystem(), equalTo(true));
assertThat(updatedFailstoreMetadata.isHidden(), equalTo(true));
}
/** /**
* If a system index erroneously is set to visible, we should remedy that situation. * If a system index erroneously is set to visible, we should remedy that situation.
*/ */
@@ -209,7 +314,7 @@ public class SystemIndexMetadataUpgradeServiceTests extends ESTestCase {
assertThat(service.requiresUpdate(systemVisibleIndex), equalTo(true)); assertThat(service.requiresUpdate(systemVisibleIndex), equalTo(true));
} }
private void assertSystemUpgradeAppliesHiddenSetting(IndexMetadata hiddenIndexMetadata) throws Exception { private void assertSystemUpgradeAppliesHiddenSetting(IndexMetadata hiddenIndexMetadata) {
assertTrue("Metadata should require update but does not", service.requiresUpdate(hiddenIndexMetadata)); assertTrue("Metadata should require update but does not", service.requiresUpdate(hiddenIndexMetadata));
Metadata.Builder clusterMetadata = new Metadata.Builder(); Metadata.Builder clusterMetadata = new Metadata.Builder();
clusterMetadata.put(IndexMetadata.builder(hiddenIndexMetadata)); clusterMetadata.put(IndexMetadata.builder(hiddenIndexMetadata));
@@ -219,8 +324,9 @@ public class SystemIndexMetadataUpgradeServiceTests extends ESTestCase {
.customs(Map.of()) .customs(Map.of())
.build(); .build();
service.submitUpdateTask(Collections.singletonList(hiddenIndexMetadata.getIndex()), Collections.emptyList());
// Get a metadata upgrade task and execute it on the initial cluster state // Get a metadata upgrade task and execute it on the initial cluster state
ClusterState newState = service.getTask().execute(clusterState); ClusterState newState = executeTask(clusterState);
IndexMetadata result = newState.metadata().getProject().index(SYSTEM_INDEX_NAME); IndexMetadata result = newState.metadata().getProject().index(SYSTEM_INDEX_NAME);
assertThat(result.isSystem(), equalTo(true)); assertThat(result.isSystem(), equalTo(true));
@@ -237,8 +343,9 @@ public class SystemIndexMetadataUpgradeServiceTests extends ESTestCase {
.customs(Map.of()) .customs(Map.of())
.build(); .build();
service.submitUpdateTask(Collections.singletonList(visibleAliasMetadata.getIndex()), Collections.emptyList());
// Get a metadata upgrade task and execute it on the initial cluster state // Get a metadata upgrade task and execute it on the initial cluster state
ClusterState newState = service.getTask().execute(clusterState); ClusterState newState = executeTask(clusterState);
IndexMetadata result = newState.metadata().getProject().index(SYSTEM_INDEX_NAME); IndexMetadata result = newState.metadata().getProject().index(SYSTEM_INDEX_NAME);
assertThat(result.isSystem(), equalTo(true)); assertThat(result.isSystem(), equalTo(true));

View file

@@ -11,7 +11,6 @@ package org.elasticsearch.common.unit;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.test.AbstractWireSerializingTestCase;
@@ -22,6 +21,10 @@ import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.function.Function; import java.util.function.Function;
import static org.elasticsearch.TransportVersions.BYTE_SIZE_VALUE_ALWAYS_USES_BYTES;
import static org.elasticsearch.TransportVersions.BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_90;
import static org.elasticsearch.TransportVersions.INITIAL_ELASTICSEARCH_9_0;
import static org.elasticsearch.TransportVersions.V_8_16_0;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
@@ -520,44 +523,44 @@ public class ByteSizeValueTests extends AbstractWireSerializingTestCase<ByteSize
public void testBWCTransportFormat() throws IOException { public void testBWCTransportFormat() throws IOException {
var tenMegs = ByteSizeValue.ofMb(10); var tenMegs = ByteSizeValue.ofMb(10);
try (BytesStreamOutput expected = new BytesStreamOutput(); BytesStreamOutput actual = new BytesStreamOutput()) { for (var tv : List.of(V_8_16_0, INITIAL_ELASTICSEARCH_9_0)) {
expected.writeZLong(10); try (BytesStreamOutput expected = new BytesStreamOutput(); BytesStreamOutput actual = new BytesStreamOutput()) {
ByteSizeUnit.MB.writeTo(expected); expected.writeZLong(10);
actual.setTransportVersion(TransportVersions.V_8_16_0); ByteSizeUnit.MB.writeTo(expected);
tenMegs.writeTo(actual); actual.setTransportVersion(tv);
assertArrayEquals( tenMegs.writeTo(actual);
"Size denominated in the desired unit for backward compatibility", assertArrayEquals(
expected.bytes().array(), "Size denominated in the desired unit for backward compatibility",
actual.bytes().array() expected.bytes().array(),
); actual.bytes().array()
);
}
} }
} }
/** public void testTransportRoundTripsWithTwoDigitFractions() throws IOException {
* @see TransportVersions#REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 for (var tv : List.of(TransportVersion.current(), BYTE_SIZE_VALUE_ALWAYS_USES_BYTES, BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_90)) {
*/ for (var desiredUnit : ByteSizeUnit.values()) {
@AwaitsFix(bugUrl = "https://elasticco.atlassian.net/browse/ES-10585") if (desiredUnit == ByteSizeUnit.BYTES) {
public void testTwoDigitTransportRoundTrips() throws IOException { // Can't have a fraction of a byte!
TransportVersion tv = TransportVersion.current(); continue;
for (var desiredUnit : ByteSizeUnit.values()) { }
if (desiredUnit == ByteSizeUnit.BYTES) { checkTransportRoundTrip(ByteSizeValue.parseBytesSizeValue("23" + desiredUnit.getSuffix(), "test"), tv);
continue; for (int tenths = 1; tenths <= 9; tenths++) {
} checkTransportRoundTrip(ByteSizeValue.parseBytesSizeValue("23." + tenths + desiredUnit.getSuffix(), "test"), tv);
checkTransportRoundTrip(ByteSizeValue.parseBytesSizeValue("23" + desiredUnit.getSuffix(), "test"), tv); for (int hundredths = 1; hundredths <= 9; hundredths++) {
for (int tenths = 1; tenths <= 9; tenths++) { checkTransportRoundTrip(
checkTransportRoundTrip(ByteSizeValue.parseBytesSizeValue("23." + tenths + desiredUnit.getSuffix(), "test"), tv); ByteSizeValue.parseBytesSizeValue("23." + tenths + hundredths + desiredUnit.getSuffix(), "test"),
for (int hundredths = 1; hundredths <= 9; hundredths++) { tv
checkTransportRoundTrip( );
ByteSizeValue.parseBytesSizeValue("23." + tenths + hundredths + desiredUnit.getSuffix(), "test"), }
tv
);
} }
} }
} }
} }
public void testIntegerTransportRoundTrips() throws IOException { public void testIntegerTransportRoundTrips() throws IOException {
for (var tv : List.of(TransportVersion.current(), TransportVersions.V_8_16_0)) { for (var tv : List.of(TransportVersion.current(), V_8_16_0)) {
checkTransportRoundTrip(ByteSizeValue.ONE, tv); checkTransportRoundTrip(ByteSizeValue.ONE, tv);
checkTransportRoundTrip(ByteSizeValue.ZERO, tv); checkTransportRoundTrip(ByteSizeValue.ZERO, tv);
checkTransportRoundTrip(ByteSizeValue.MINUS_ONE, tv); checkTransportRoundTrip(ByteSizeValue.MINUS_ONE, tv);

View file

@@ -19,6 +19,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
import org.elasticsearch.cluster.routing.allocation.shards.ShardsAvailabilityHealthIndicatorService;
import org.elasticsearch.cluster.routing.allocation.shards.ShardsAvailabilityHealthIndicatorServiceTests; import org.elasticsearch.cluster.routing.allocation.shards.ShardsAvailabilityHealthIndicatorServiceTests;
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
@@ -28,6 +29,7 @@ import org.elasticsearch.common.scheduler.SchedulerEngine;
import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.TimeValue;
import org.elasticsearch.health.node.DiskHealthIndicatorService;
import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.telemetry.TelemetryProvider;
import org.elasticsearch.telemetry.metric.LongGaugeMetric; import org.elasticsearch.telemetry.metric.LongGaugeMetric;
import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.telemetry.metric.MeterRegistry;
@@ -51,9 +53,12 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer; import java.util.function.BiConsumer;
import java.util.function.Consumer; import java.util.function.Consumer;
import static org.elasticsearch.cluster.routing.allocation.shards.ShardsAvailabilityHealthIndicatorService.PRIMARY_UNASSIGNED_IMPACT_ID;
import static org.elasticsearch.cluster.routing.allocation.shards.ShardsAvailabilityHealthIndicatorService.REPLICA_UNASSIGNED_IMPACT_ID;
import static org.elasticsearch.health.HealthStatus.GREEN; import static org.elasticsearch.health.HealthStatus.GREEN;
import static org.elasticsearch.health.HealthStatus.RED; import static org.elasticsearch.health.HealthStatus.RED;
import static org.elasticsearch.health.HealthStatus.YELLOW; import static org.elasticsearch.health.HealthStatus.YELLOW;
import static org.elasticsearch.health.node.DiskHealthIndicatorService.IMPACT_INGEST_UNAVAILABLE_ID;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.any;
@@ -125,9 +130,9 @@ public class HealthPeriodicLoggerTests extends ESTestCase {
Map<String, Object> loggerResults = HealthPeriodicLogger.convertToLoggedFields(results); Map<String, Object> loggerResults = HealthPeriodicLogger.convertToLoggedFields(results);
// verify that the number of fields is the number of indicators + 4 // verify that the number of fields is the number of indicators + 7
// (for overall and for message, plus details for the two yellow indicators) // (for overall and for message, plus details for the two yellow indicators, plus three impact)
assertThat(loggerResults.size(), equalTo(results.size() + 4)); assertThat(loggerResults.size(), equalTo(results.size() + 7));
// test indicator status // test indicator status
assertThat(loggerResults.get(makeHealthStatusString("master_is_stable")), equalTo("green")); assertThat(loggerResults.get(makeHealthStatusString("master_is_stable")), equalTo("green"));
@@ -165,6 +170,17 @@ public class HealthPeriodicLoggerTests extends ESTestCase {
equalTo(String.format(Locale.ROOT, "health=%s [disk,shards_availability]", overallStatus.xContentValue())) equalTo(String.format(Locale.ROOT, "health=%s [disk,shards_availability]", overallStatus.xContentValue()))
); );
// test impact
assertThat(loggerResults.get(makeHealthImpactString(DiskHealthIndicatorService.NAME, IMPACT_INGEST_UNAVAILABLE_ID)), equalTo(true));
assertThat(
loggerResults.get(makeHealthImpactString(ShardsAvailabilityHealthIndicatorService.NAME, PRIMARY_UNASSIGNED_IMPACT_ID)),
equalTo(true)
);
assertThat(
loggerResults.get(makeHealthImpactString(ShardsAvailabilityHealthIndicatorService.NAME, REPLICA_UNASSIGNED_IMPACT_ID)),
equalTo(true)
);
// test empty results // test empty results
{ {
List<HealthIndicatorResult> empty = new ArrayList<>(); List<HealthIndicatorResult> empty = new ArrayList<>();
@@ -793,7 +809,15 @@
1 1
) )
), ),
null, List.of(
new HealthIndicatorImpact(
DiskHealthIndicatorService.NAME,
IMPACT_INGEST_UNAVAILABLE_ID,
2,
"description",
List.of(ImpactArea.INGEST)
)
),
null null
); );
var shardsAvailable = new HealthIndicatorResult( var shardsAvailable = new HealthIndicatorResult(
@@ -801,7 +825,22 @@
YELLOW, YELLOW,
null, null,
new SimpleHealthIndicatorDetails(ShardsAvailabilityHealthIndicatorServiceTests.addDefaults(Map.of())), new SimpleHealthIndicatorDetails(ShardsAvailabilityHealthIndicatorServiceTests.addDefaults(Map.of())),
null, List.of(
new HealthIndicatorImpact(
ShardsAvailabilityHealthIndicatorService.NAME,
PRIMARY_UNASSIGNED_IMPACT_ID,
2,
"description",
List.of(ImpactArea.SEARCH)
),
new HealthIndicatorImpact(
ShardsAvailabilityHealthIndicatorService.NAME,
REPLICA_UNASSIGNED_IMPACT_ID,
2,
"description",
List.of(ImpactArea.SEARCH)
)
),
null null
); );
@@ -846,6 +885,10 @@
return String.format(Locale.ROOT, "%s.%s.details", HealthPeriodicLogger.HEALTH_FIELD_PREFIX, key); return String.format(Locale.ROOT, "%s.%s.details", HealthPeriodicLogger.HEALTH_FIELD_PREFIX, key);
} }
private String makeHealthImpactString(String indicatorName, String impact) {
return String.format(Locale.ROOT, "%s.%s.%s.impacted", HealthPeriodicLogger.HEALTH_FIELD_PREFIX, indicatorName, impact);
}
private HealthPeriodicLogger createAndInitHealthPeriodicLogger( private HealthPeriodicLogger createAndInitHealthPeriodicLogger(
ClusterService clusterService, ClusterService clusterService,
HealthService testHealthService, HealthService testHealthService,

View file

@@ -17,10 +17,8 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchShardsRequest; import org.elasticsearch.action.search.SearchShardsRequest;
import org.elasticsearch.action.search.SearchShardsResponse; import org.elasticsearch.action.search.SearchShardsResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.search.TransportSearchAction;
import org.elasticsearch.action.search.TransportSearchShardsAction; import org.elasticsearch.action.search.TransportSearchShardsAction;
import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.PlainActionFuture;
@@ -46,7 +44,7 @@ import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.mocksocket.MockServerSocket; import org.elasticsearch.mocksocket.MockServerSocket;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.TestThreadPool;
@@ -157,26 +155,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
} else { } else {
searchHits = SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN); searchHits = SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN);
} }
try ( try (var searchResponseRef = ReleasableRef.of(SearchResponseUtils.successfulResponse(searchHits))) {
var searchResponseRef = ReleasableRef.of(
new SearchResponse(
searchHits,
InternalAggregations.EMPTY,
null,
false,
null,
null,
1,
null,
1,
1,
0,
100,
ShardSearchFailure.EMPTY_ARRAY,
SearchResponse.Clusters.EMPTY
)
)
) {
channel.sendResponse(searchResponseRef.get()); channel.sendResponse(searchResponseRef.get());
} }
} }

View file

@@ -15,6 +15,7 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchResponse.Clusters;
import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.client.Response; import org.elasticsearch.client.Response;
import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexMetadata;
@@ -22,6 +23,7 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.common.xcontent.XContentParserUtils;
@@ -76,6 +78,157 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr
public enum SearchResponseUtils { public enum SearchResponseUtils {
; ;
public static SearchResponseBuilder response() {
return new SearchResponseBuilder();
}
public static SearchResponseBuilder response(SearchHits hits) {
return new SearchResponseBuilder().searchHits(hits).numReducePhases(1).shards(1, 1, 0).tookInMillis(100);
}
public static SearchResponse successfulResponse(SearchHits hits) {
return response(hits).build();
}
public static SearchResponse emptyWithTotalHits(
String scrollId,
int totalShards,
int successfulShards,
int skippedShards,
long tookInMillis,
ShardSearchFailure[] shardFailures,
SearchResponse.Clusters clusters
) {
return new SearchResponse(
SearchHits.EMPTY_WITH_TOTAL_HITS,
null,
null,
false,
null,
null,
1,
scrollId,
totalShards,
successfulShards,
skippedShards,
tookInMillis,
shardFailures,
clusters
);
}
public static class SearchResponseBuilder {
private SearchHits searchHits = SearchHits.empty(Lucene.TOTAL_HITS_EQUAL_TO_ZERO, Float.NaN);
private InternalAggregations aggregations;
private Suggest suggest;
private boolean timedOut;
private Boolean terminatedEarly;
private SearchProfileResults profileResults;
private int numReducePhases;
private String scrollId;
private int totalShards;
private int successfulShards;
private int skippedShards;
private long tookInMillis;
private List<ShardSearchFailure> shardFailures;
private Clusters clusters = Clusters.EMPTY;
private BytesReference pointInTimeId;
private SearchResponseBuilder() {}
public SearchResponseBuilder searchHits(SearchHits searchHits) {
this.searchHits = searchHits;
return this;
}
public SearchResponseBuilder aggregations(InternalAggregations aggregations) {
this.aggregations = aggregations;
return this;
}
public SearchResponseBuilder suggest(Suggest suggest) {
this.suggest = suggest;
return this;
}
public SearchResponseBuilder timedOut(boolean timedOut) {
this.timedOut = timedOut;
return this;
}
public SearchResponseBuilder terminatedEarly(Boolean terminatedEarly) {
this.terminatedEarly = terminatedEarly;
return this;
}
public SearchResponseBuilder profileResults(SearchProfileResults profileResults) {
this.profileResults = profileResults;
return this;
}
public SearchResponseBuilder numReducePhases(int numReducePhases) {
this.numReducePhases = numReducePhases;
return this;
}
public SearchResponseBuilder scrollId(String scrollId) {
this.scrollId = scrollId;
return this;
}
public SearchResponseBuilder shards(int total, int successful, int skipped) {
this.totalShards = total;
this.successfulShards = successful;
this.skippedShards = skipped;
return this;
}
public SearchResponseBuilder tookInMillis(long tookInMillis) {
this.tookInMillis = tookInMillis;
return this;
}
public SearchResponseBuilder shardFailures(ShardSearchFailure... failures) {
shardFailures = List.of(failures);
return this;
}
public SearchResponseBuilder shardFailures(List<ShardSearchFailure> failures) {
shardFailures = List.copyOf(failures);
return this;
}
public SearchResponseBuilder clusters(Clusters clusters) {
this.clusters = clusters;
return this;
}
public SearchResponseBuilder pointInTimeId(BytesReference pointInTimeId) {
this.pointInTimeId = pointInTimeId;
return this;
}
public SearchResponse build() {
return new SearchResponse(
searchHits,
aggregations,
suggest,
timedOut,
terminatedEarly,
profileResults,
numReducePhases,
scrollId,
totalShards,
successfulShards,
skippedShards,
tookInMillis,
shardFailures == null ? ShardSearchFailure.EMPTY_ARRAY : shardFailures.toArray(ShardSearchFailure[]::new),
clusters,
pointInTimeId
);
}
}
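For callers that only care about a couple of fields, the builder above reads far better than the fourteen-argument SearchResponse constructor. A typical test-side helper using only the methods defined above (it assumes the Elasticsearch test classpath; hits is whatever SearchHits instance the test already has):

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchResponseUtils;

// Illustrative helper only: response(hits) already defaults to 1 reduce phase, 1/1/0 shards, took=100ms.
class SearchResponseBuilderUsage {
    static SearchResponse minimalSuccessfulResponse(SearchHits hits) {
        return SearchResponseUtils.response(hits)
            .shardFailures(ShardSearchFailure.EMPTY_ARRAY)
            .clusters(SearchResponse.Clusters.EMPTY)
            .build();
    }
}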
// All fields on the root level of the parsed SearchHit are interpreted as metadata fields // All fields on the root level of the parsed SearchHit are interpreted as metadata fields
// public because we use it in a completion suggestion option // public because we use it in a completion suggestion option
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@@ -110,33 +263,6 @@
} }
} }
public static SearchResponse emptyWithTotalHits(
String scrollId,
int totalShards,
int successfulShards,
int skippedShards,
long tookInMillis,
ShardSearchFailure[] shardFailures,
SearchResponse.Clusters clusters
) {
return new SearchResponse(
SearchHits.EMPTY_WITH_TOTAL_HITS,
null,
null,
false,
null,
null,
1,
scrollId,
totalShards,
successfulShards,
skippedShards,
tookInMillis,
shardFailures,
clusters
);
}
public static SearchResponse parseSearchResponse(XContentParser parser) throws IOException { public static SearchResponse parseSearchResponse(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
parser.nextToken(); parser.nextToken();

View file

@@ -1,7 +1,7 @@
# "version" holds the version of the templates and ingest pipelines installed # "version" holds the version of the templates and ingest pipelines installed
# by xpack-plugin apm-data. This must be increased whenever an existing template or # by xpack-plugin apm-data. This must be increased whenever an existing template or
# pipeline is changed, in order for it to be updated on Elasticsearch upgrade. # pipeline is changed, in order for it to be updated on Elasticsearch upgrade.
version: 12 version: 13
component-templates: component-templates:
# Data lifecycle. # Data lifecycle.

View file

@@ -23,6 +23,7 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalAggregations;
@@ -472,22 +473,10 @@ public class AsyncSearchTaskTests extends ESTestCase {
int skippedShards, int skippedShards,
ShardSearchFailure... failures ShardSearchFailure... failures
) { ) {
return new SearchResponse( return SearchResponseUtils.response(SearchHits.EMPTY_WITH_TOTAL_HITS)
SearchHits.EMPTY_WITH_TOTAL_HITS, .shards(totalShards, successfulShards, skippedShards)
InternalAggregations.EMPTY, .shardFailures(failures)
null, .build();
false,
null,
null,
1,
null,
totalShards,
successfulShards,
skippedShards,
100,
failures,
SearchResponse.Clusters.EMPTY
);
} }
private static void assertCompletionListeners( private static void assertCompletionListeners(

View file

@@ -7,12 +7,8 @@
package org.elasticsearch.xpack.core.inference.results; package org.elasticsearch.xpack.core.inference.results;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.inference.ChunkedInference; import org.elasticsearch.inference.ChunkedInference;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.search.WeightedToken;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@@ -21,7 +17,7 @@ import java.util.List;
import static org.elasticsearch.xpack.core.inference.results.TextEmbeddingUtils.validateInputSizeAgainstEmbeddings; import static org.elasticsearch.xpack.core.inference.results.TextEmbeddingUtils.validateInputSizeAgainstEmbeddings;
public record ChunkedInferenceEmbeddingSparse(List<SparseEmbeddingChunk> chunks) implements ChunkedInference { public record ChunkedInferenceEmbedding(List<? extends EmbeddingResults.Chunk> chunks) implements ChunkedInference {
public static List<ChunkedInference> listOf(List<String> inputs, SparseEmbeddingResults sparseEmbeddingResults) { public static List<ChunkedInference> listOf(List<String> inputs, SparseEmbeddingResults sparseEmbeddingResults) {
validateInputSizeAgainstEmbeddings(inputs, sparseEmbeddingResults.embeddings().size()); validateInputSizeAgainstEmbeddings(inputs, sparseEmbeddingResults.embeddings().size());
@ -29,9 +25,9 @@ public record ChunkedInferenceEmbeddingSparse(List<SparseEmbeddingChunk> chunks)
var results = new ArrayList<ChunkedInference>(inputs.size()); var results = new ArrayList<ChunkedInference>(inputs.size());
for (int i = 0; i < inputs.size(); i++) { for (int i = 0; i < inputs.size(); i++) {
results.add( results.add(
new ChunkedInferenceEmbeddingSparse( new ChunkedInferenceEmbedding(
List.of( List.of(
new SparseEmbeddingChunk( new SparseEmbeddingResults.Chunk(
sparseEmbeddingResults.embeddings().get(i).tokens(), sparseEmbeddingResults.embeddings().get(i).tokens(),
inputs.get(i), inputs.get(i),
new TextOffset(0, inputs.get(i).length()) new TextOffset(0, inputs.get(i).length())
@ -47,21 +43,9 @@ public record ChunkedInferenceEmbeddingSparse(List<SparseEmbeddingChunk> chunks)
@Override @Override
public Iterator<Chunk> chunksAsMatchedTextAndByteReference(XContent xcontent) throws IOException { public Iterator<Chunk> chunksAsMatchedTextAndByteReference(XContent xcontent) throws IOException {
var asChunk = new ArrayList<Chunk>(); var asChunk = new ArrayList<Chunk>();
for (var chunk : chunks) { for (var chunk : chunks()) {
asChunk.add(new Chunk(chunk.matchedText(), chunk.offset(), toBytesReference(xcontent, chunk.weightedTokens()))); asChunk.add(chunk.toChunk(xcontent));
} }
return asChunk.iterator(); return asChunk.iterator();
} }
private static BytesReference toBytesReference(XContent xContent, List<WeightedToken> tokens) throws IOException {
XContentBuilder b = XContentBuilder.builder(xContent);
b.startObject();
for (var weightedToken : tokens) {
weightedToken.toXContent(b, ToXContent.EMPTY_PARAMS);
}
b.endObject();
return BytesReference.bytes(b);
}
public record SparseEmbeddingChunk(List<WeightedToken> weightedTokens, String matchedText, TextOffset offset) {}
} }

View file

@ -1,45 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.inference.results;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.inference.ChunkedInference;
import org.elasticsearch.xcontent.XContent;
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
public record ChunkedInferenceEmbeddingByte(List<ChunkedInferenceEmbeddingByte.ByteEmbeddingChunk> chunks) implements ChunkedInference {
@Override
public Iterator<Chunk> chunksAsMatchedTextAndByteReference(XContent xcontent) throws IOException {
var asChunk = new ArrayList<Chunk>();
for (var chunk : chunks) {
asChunk.add(new Chunk(chunk.matchedText(), chunk.offset(), toBytesReference(xcontent, chunk.embedding())));
}
return asChunk.iterator();
}
/**
* Serialises the {@code value} array, according to the provided {@link XContent}, into a {@link BytesReference}.
*/
private static BytesReference toBytesReference(XContent xContent, byte[] value) throws IOException {
XContentBuilder builder = XContentBuilder.builder(xContent);
builder.startArray();
for (byte v : value) {
builder.value(v);
}
builder.endArray();
return BytesReference.bytes(builder);
}
public record ByteEmbeddingChunk(byte[] embedding, String matchedText, TextOffset offset) {}
}

View file

@ -1,45 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.inference.results;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.inference.ChunkedInference;
import org.elasticsearch.xcontent.XContent;
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
public record ChunkedInferenceEmbeddingFloat(List<FloatEmbeddingChunk> chunks) implements ChunkedInference {
@Override
public Iterator<Chunk> chunksAsMatchedTextAndByteReference(XContent xcontent) throws IOException {
var asChunk = new ArrayList<Chunk>();
for (var chunk : chunks) {
asChunk.add(new Chunk(chunk.matchedText(), chunk.offset(), toBytesReference(xcontent, chunk.embedding())));
}
return asChunk.iterator();
}
/**
* Serialises the {@code value} array, according to the provided {@link XContent}, into a {@link BytesReference}.
*/
private static BytesReference toBytesReference(XContent xContent, float[] value) throws IOException {
XContentBuilder b = XContentBuilder.builder(xContent);
b.startArray();
for (float v : value) {
b.value(v);
}
b.endArray();
return BytesReference.bytes(b);
}
public record FloatEmbeddingChunk(float[] embedding, String matchedText, TextOffset offset) {}
}

View file

@ -1,12 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.inference.results;
public interface EmbeddingInt {
int getSize();
}

View file

@ -0,0 +1,51 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.inference.results;
import org.elasticsearch.inference.ChunkedInference;
import org.elasticsearch.inference.InferenceServiceResults;
import org.elasticsearch.xcontent.XContent;
import java.io.IOException;
import java.util.List;
/**
* The results of a call to the inference service that contains embeddings (sparse or dense).
* A call to the inference service may contain multiple input texts, so this results may
* contain multiple results.
*/
public interface EmbeddingResults<C extends EmbeddingResults.Chunk, E extends EmbeddingResults.Embedding<C>>
extends
InferenceServiceResults {
/**
* A resulting embedding together with its input text.
*/
interface Chunk {
ChunkedInference.Chunk toChunk(XContent xcontent) throws IOException;
String matchedText();
ChunkedInference.TextOffset offset();
}
/**
* A resulting embedding for one of the input texts to the inference service.
*/
interface Embedding<C extends Chunk> {
/**
* Combines the resulting embedding with the input into a chunk.
*/
C toChunk(String text, ChunkedInference.TextOffset offset);
}
/**
* The resulting list of embeddings for the input texts to the inference service.
*/
List<E> embeddings();
}
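For orientation, a minimal sketch of how the new interfaces are meant to compose, using only types that appear in this commit; the concrete values, the WeightedToken(String, float) constructor and JsonXContent.jsonXContent are assumptions for illustration, not part of the change:

    // An inference call yields an EmbeddingResults implementation, e.g. SparseEmbeddingResults.
    var results = new SparseEmbeddingResults(
        List.of(new SparseEmbeddingResults.Embedding(List.of(new WeightedToken("hello", 0.42f)), false))
    );
    // Each Embedding is paired with its input text to form a Chunk...
    SparseEmbeddingResults.Chunk chunk = results.embeddings()
        .get(0)
        .toChunk("hello world", new ChunkedInference.TextOffset(0, 11));
    // ...and the Chunk renders itself into the byte form stored on a ChunkedInference.
    ChunkedInference.Chunk stored = chunk.toChunk(JsonXContent.jsonXContent);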

View file

@ -1,95 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*
* this file was contributed to by a generative AI
*/
package org.elasticsearch.xpack.core.inference.results;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
public record InferenceByteEmbedding(byte[] values) implements Writeable, ToXContentObject, EmbeddingInt {
public static final String EMBEDDING = "embedding";
public InferenceByteEmbedding(StreamInput in) throws IOException {
this(in.readByteArray());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeByteArray(values);
}
public static InferenceByteEmbedding of(List<Byte> embeddingValuesList) {
byte[] embeddingValues = new byte[embeddingValuesList.size()];
for (int i = 0; i < embeddingValuesList.size(); i++) {
embeddingValues[i] = embeddingValuesList.get(i);
}
return new InferenceByteEmbedding(embeddingValues);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.startArray(EMBEDDING);
for (byte value : values) {
builder.value(value);
}
builder.endArray();
builder.endObject();
return builder;
}
@Override
public String toString() {
return Strings.toString(this);
}
float[] toFloatArray() {
float[] floatArray = new float[values.length];
for (int i = 0; i < values.length; i++) {
floatArray[i] = ((Byte) values[i]).floatValue();
}
return floatArray;
}
double[] toDoubleArray() {
double[] doubleArray = new double[values.length];
for (int i = 0; i < values.length; i++) {
doubleArray[i] = ((Byte) values[i]).doubleValue();
}
return doubleArray;
}
@Override
public int getSize() {
return values().length;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
InferenceByteEmbedding embedding = (InferenceByteEmbedding) o;
return Arrays.equals(values, embedding.values);
}
@Override
public int hashCode() {
return Arrays.hashCode(values);
}
}

View file

@ -1,109 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*
* this file was contributed to by a generative AI
*/
package org.elasticsearch.xpack.core.inference.results;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
import org.elasticsearch.inference.InferenceResults;
import org.elasticsearch.inference.InferenceServiceResults;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* Writes a text embedding result in the follow json format
* {
* "text_embedding_bytes": [
* {
* "embedding": [
* 23
* ]
* },
* {
* "embedding": [
* -23
* ]
* }
* ]
* }
*/
public record InferenceTextEmbeddingByteResults(List<InferenceByteEmbedding> embeddings) implements InferenceServiceResults, TextEmbedding {
public static final String NAME = "text_embedding_service_byte_results";
public static final String TEXT_EMBEDDING_BYTES = "text_embedding_bytes";
public InferenceTextEmbeddingByteResults(StreamInput in) throws IOException {
this(in.readCollectionAsList(InferenceByteEmbedding::new));
}
@Override
public int getFirstEmbeddingSize() {
return TextEmbeddingUtils.getFirstEmbeddingSize(new ArrayList<>(embeddings));
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
return ChunkedToXContentHelper.array(TEXT_EMBEDDING_BYTES, embeddings.iterator());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeCollection(embeddings);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public List<? extends InferenceResults> transformToCoordinationFormat() {
return embeddings.stream()
.map(embedding -> new MlTextEmbeddingResults(TEXT_EMBEDDING_BYTES, embedding.toDoubleArray(), false))
.toList();
}
@Override
@SuppressWarnings("deprecation")
public List<? extends InferenceResults> transformToLegacyFormat() {
var legacyEmbedding = new LegacyTextEmbeddingResults(
embeddings.stream().map(embedding -> new LegacyTextEmbeddingResults.Embedding(embedding.toFloatArray())).toList()
);
return List.of(legacyEmbedding);
}
public Map<String, Object> asMap() {
Map<String, Object> map = new LinkedHashMap<>();
map.put(TEXT_EMBEDDING_BYTES, embeddings);
return map;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
InferenceTextEmbeddingByteResults that = (InferenceTextEmbeddingByteResults) o;
return Objects.equals(embeddings, that.embeddings);
}
@Override
public int hashCode() {
return Objects.hash(embeddings);
}
}

View file

@@ -44,7 +44,7 @@ import java.util.Objects;
*
* Legacy text embedding results represents what was returned prior to the
* {@link org.elasticsearch.TransportVersions#V_8_12_0} version.
- * @deprecated use {@link InferenceTextEmbeddingFloatResults} instead
+ * @deprecated use {@link TextEmbeddingFloatResults} instead
*/
@Deprecated
public record LegacyTextEmbeddingResults(List<Embedding> embeddings) implements InferenceResults {
@@ -114,8 +114,8 @@ public record LegacyTextEmbeddingResults(List<Embedding> embeddings) implements
return Objects.hash(embeddings);
}
-public InferenceTextEmbeddingFloatResults transformToTextEmbeddingResults() {
-return new InferenceTextEmbeddingFloatResults(this);
+public TextEmbeddingFloatResults transformToTextEmbeddingResults() {
+return new TextEmbeddingFloatResults(this);
}
public record Embedding(float[] values) implements Writeable, ToXContentObject {

View file

@@ -9,16 +9,18 @@ package org.elasticsearch.xpack.core.inference.results;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
+import org.elasticsearch.inference.ChunkedInference;
import org.elasticsearch.inference.InferenceResults;
-import org.elasticsearch.inference.InferenceServiceResults;
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.ToXContentObject;
+import org.elasticsearch.xcontent.XContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults;
import org.elasticsearch.xpack.core.ml.search.WeightedToken;
@@ -33,13 +35,15 @@ import java.util.stream.Collectors;
import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD;
-public record SparseEmbeddingResults(List<Embedding> embeddings) implements InferenceServiceResults {
+public record SparseEmbeddingResults(List<Embedding> embeddings)
+implements
+EmbeddingResults<SparseEmbeddingResults.Chunk, SparseEmbeddingResults.Embedding> {
public static final String NAME = "sparse_embedding_results";
public static final String SPARSE_EMBEDDING = TaskType.SPARSE_EMBEDDING.toString();
public SparseEmbeddingResults(StreamInput in) throws IOException {
-this(in.readCollectionAsList(Embedding::new));
+this(in.readCollectionAsList(SparseEmbeddingResults.Embedding::new));
}
public static SparseEmbeddingResults of(List<? extends InferenceResults> results) {
@@ -47,7 +51,9 @@ public record SparseEmbeddingResults(List<Embedding> embeddings) implements Infe
for (InferenceResults result : results) {
if (result instanceof TextExpansionResults expansionResults) {
-embeddings.add(Embedding.create(expansionResults.getWeightedTokens(), expansionResults.isTruncated()));
+embeddings.add(
+SparseEmbeddingResults.Embedding.create(expansionResults.getWeightedTokens(), expansionResults.isTruncated())
+);
} else if (result instanceof org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults errorResult) {
if (errorResult.getException() instanceof ElasticsearchStatusException statusException) {
throw statusException;
@@ -87,7 +93,7 @@ public record SparseEmbeddingResults(List<Embedding> embeddings) implements Infe
public Map<String, Object> asMap() {
Map<String, Object> map = new LinkedHashMap<>();
-var embeddingList = embeddings.stream().map(Embedding::asMap).toList();
+var embeddingList = embeddings.stream().map(SparseEmbeddingResults.Embedding::asMap).toList();
map.put(SPARSE_EMBEDDING, embeddingList);
return map;
@@ -114,7 +120,11 @@ public record SparseEmbeddingResults(List<Embedding> embeddings) implements Infe
.toList();
}
-public record Embedding(List<WeightedToken> tokens, boolean isTruncated) implements Writeable, ToXContentObject {
+public record Embedding(List<WeightedToken> tokens, boolean isTruncated)
+implements
+Writeable,
+ToXContentObject,
+EmbeddingResults.Embedding<Chunk> {
public static final String EMBEDDING = "embedding";
public static final String IS_TRUNCATED = "is_truncated";
@@ -163,5 +173,29 @@ public record SparseEmbeddingResults(List<Embedding> embeddings) implements Infe
public String toString() {
return Strings.toString(this);
}
+@Override
+public Chunk toChunk(String text, ChunkedInference.TextOffset offset) {
+return new Chunk(tokens, text, offset);
+}
}
+public record Chunk(List<WeightedToken> weightedTokens, String matchedText, ChunkedInference.TextOffset offset)
+implements
+EmbeddingResults.Chunk {
+public ChunkedInference.Chunk toChunk(XContent xcontent) throws IOException {
+return new ChunkedInference.Chunk(matchedText, offset, toBytesReference(xcontent, weightedTokens));
+}
+private static BytesReference toBytesReference(XContent xContent, List<WeightedToken> tokens) throws IOException {
+XContentBuilder b = XContentBuilder.builder(xContent);
+b.startObject();
+for (var weightedToken : tokens) {
+weightedToken.toXContent(b, ToXContent.EMPTY_PARAMS);
+}
+b.endObject();
+return BytesReference.bytes(b);
+}
+}
}

View file

@@ -13,12 +13,10 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
import org.elasticsearch.inference.InferenceResults;
-import org.elasticsearch.inference.InferenceServiceResults;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults;
import java.io.IOException;
-import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
@@ -42,17 +40,22 @@ import java.util.Objects;
* ]
* }
*/
-public record InferenceTextEmbeddingBitResults(List<InferenceByteEmbedding> embeddings) implements InferenceServiceResults, TextEmbedding {
+public record TextEmbeddingBitResults(List<TextEmbeddingByteResults.Embedding> embeddings)
+implements
+TextEmbeddingResults<TextEmbeddingByteResults.Chunk, TextEmbeddingByteResults.Embedding> {
public static final String NAME = "text_embedding_service_bit_results";
public static final String TEXT_EMBEDDING_BITS = "text_embedding_bits";
-public InferenceTextEmbeddingBitResults(StreamInput in) throws IOException {
-this(in.readCollectionAsList(InferenceByteEmbedding::new));
+public TextEmbeddingBitResults(StreamInput in) throws IOException {
+this(in.readCollectionAsList(TextEmbeddingByteResults.Embedding::new));
}
@Override
public int getFirstEmbeddingSize() {
-return TextEmbeddingUtils.getFirstEmbeddingSize(new ArrayList<>(embeddings));
+if (embeddings.isEmpty()) {
+throw new IllegalStateException("Embeddings list is empty");
+}
+return embeddings.getFirst().values().length;
}
@Override
@@ -98,7 +101,7 @@ public record InferenceTextEmbeddingBitResults(List<InferenceByteEmbedding> embe
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
-InferenceTextEmbeddingBitResults that = (InferenceTextEmbeddingBitResults) o;
+TextEmbeddingBitResults that = (TextEmbeddingBitResults) o;
return Objects.equals(embeddings, that.embeddings);
}

View file

@ -0,0 +1,214 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*
* this file was contributed to by a generative AI
*/
package org.elasticsearch.xpack.core.inference.results;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
import org.elasticsearch.inference.ChunkedInference;
import org.elasticsearch.inference.InferenceResults;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* Writes a text embedding result in the follow json format
* {
* "text_embedding_bytes": [
* {
* "embedding": [
* 23
* ]
* },
* {
* "embedding": [
* -23
* ]
* }
* ]
* }
*/
public record TextEmbeddingByteResults(List<Embedding> embeddings)
implements
TextEmbeddingResults<TextEmbeddingByteResults.Chunk, TextEmbeddingByteResults.Embedding> {
public static final String NAME = "text_embedding_service_byte_results";
public static final String TEXT_EMBEDDING_BYTES = "text_embedding_bytes";
public TextEmbeddingByteResults(StreamInput in) throws IOException {
this(in.readCollectionAsList(TextEmbeddingByteResults.Embedding::new));
}
@Override
public int getFirstEmbeddingSize() {
if (embeddings.isEmpty()) {
throw new IllegalStateException("Embeddings list is empty");
}
return embeddings.getFirst().values().length;
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
return ChunkedToXContentHelper.array(TEXT_EMBEDDING_BYTES, embeddings.iterator());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeCollection(embeddings);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public List<? extends InferenceResults> transformToCoordinationFormat() {
return embeddings.stream()
.map(embedding -> new MlTextEmbeddingResults(TEXT_EMBEDDING_BYTES, embedding.toDoubleArray(), false))
.toList();
}
@Override
@SuppressWarnings("deprecation")
public List<? extends InferenceResults> transformToLegacyFormat() {
var legacyEmbedding = new LegacyTextEmbeddingResults(
embeddings.stream().map(embedding -> new LegacyTextEmbeddingResults.Embedding(embedding.toFloatArray())).toList()
);
return List.of(legacyEmbedding);
}
public Map<String, Object> asMap() {
Map<String, Object> map = new LinkedHashMap<>();
map.put(TEXT_EMBEDDING_BYTES, embeddings);
return map;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TextEmbeddingByteResults that = (TextEmbeddingByteResults) o;
return Objects.equals(embeddings, that.embeddings);
}
@Override
public int hashCode() {
return Objects.hash(embeddings);
}
public record Embedding(byte[] values) implements Writeable, ToXContentObject, EmbeddingResults.Embedding<Chunk> {
public static final String EMBEDDING = "embedding";
public Embedding(StreamInput in) throws IOException {
this(in.readByteArray());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeByteArray(values);
}
public static Embedding of(List<Byte> embeddingValuesList) {
byte[] embeddingValues = new byte[embeddingValuesList.size()];
for (int i = 0; i < embeddingValuesList.size(); i++) {
embeddingValues[i] = embeddingValuesList.get(i);
}
return new Embedding(embeddingValues);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.startArray(EMBEDDING);
for (byte value : values) {
builder.value(value);
}
builder.endArray();
builder.endObject();
return builder;
}
@Override
public String toString() {
return Strings.toString(this);
}
float[] toFloatArray() {
float[] floatArray = new float[values.length];
for (int i = 0; i < values.length; i++) {
floatArray[i] = ((Byte) values[i]).floatValue();
}
return floatArray;
}
double[] toDoubleArray() {
double[] doubleArray = new double[values.length];
for (int i = 0; i < values.length; i++) {
doubleArray[i] = ((Byte) values[i]).doubleValue();
}
return doubleArray;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Embedding embedding = (Embedding) o;
return Arrays.equals(values, embedding.values);
}
@Override
public int hashCode() {
return Arrays.hashCode(values);
}
@Override
public Chunk toChunk(String text, ChunkedInference.TextOffset offset) {
return new Chunk(values, text, offset);
}
}
/**
* Serialises the {@code value} array, according to the provided {@link XContent}, into a {@link BytesReference}.
*/
public record Chunk(byte[] embedding, String matchedText, ChunkedInference.TextOffset offset) implements EmbeddingResults.Chunk {
public ChunkedInference.Chunk toChunk(XContent xcontent) throws IOException {
return new ChunkedInference.Chunk(matchedText, offset, toBytesReference(xcontent, embedding));
}
private static BytesReference toBytesReference(XContent xContent, byte[] value) throws IOException {
XContentBuilder builder = XContentBuilder.builder(xContent);
builder.startArray();
for (byte v : value) {
builder.value(v);
}
builder.endArray();
return BytesReference.bytes(builder);
}
}
}
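A small usage sketch for the byte variant, restricted to the members shown above; the values are illustrative only:

    // Build one byte embedding and pair it with its input text as a chunk.
    var embedding = TextEmbeddingByteResults.Embedding.of(List.of((byte) 23, (byte) -23));
    var chunk = embedding.toChunk("some text", new ChunkedInference.TextOffset(0, 9));
    // chunk.toChunk(xContent) serialises just the byte values (e.g. [23,-23] as JSON), while the
    // full TextEmbeddingByteResults renders under "text_embedding_bytes" as documented in the javadoc.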

View file

@@ -11,16 +11,18 @@ package org.elasticsearch.xpack.core.inference.results;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
+import org.elasticsearch.inference.ChunkedInference;
import org.elasticsearch.inference.InferenceResults;
-import org.elasticsearch.inference.InferenceServiceResults;
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.ToXContentObject;
+import org.elasticsearch.xcontent.XContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults;
@@ -51,32 +53,31 @@ import java.util.stream.Collectors;
* ]
* }
*/
-public record InferenceTextEmbeddingFloatResults(List<InferenceFloatEmbedding> embeddings)
+public record TextEmbeddingFloatResults(List<Embedding> embeddings)
implements
-InferenceServiceResults,
-TextEmbedding {
+TextEmbeddingResults<TextEmbeddingFloatResults.Chunk, TextEmbeddingFloatResults.Embedding> {
public static final String NAME = "text_embedding_service_results";
public static final String TEXT_EMBEDDING = TaskType.TEXT_EMBEDDING.toString();
-public InferenceTextEmbeddingFloatResults(StreamInput in) throws IOException {
-this(in.readCollectionAsList(InferenceFloatEmbedding::new));
+public TextEmbeddingFloatResults(StreamInput in) throws IOException {
+this(in.readCollectionAsList(TextEmbeddingFloatResults.Embedding::new));
}
@SuppressWarnings("deprecation")
-InferenceTextEmbeddingFloatResults(LegacyTextEmbeddingResults legacyTextEmbeddingResults) {
+TextEmbeddingFloatResults(LegacyTextEmbeddingResults legacyTextEmbeddingResults) {
this(
legacyTextEmbeddingResults.embeddings()
.stream()
-.map(embedding -> new InferenceFloatEmbedding(embedding.values()))
+.map(embedding -> new Embedding(embedding.values()))
.collect(Collectors.toList())
);
}
-public static InferenceTextEmbeddingFloatResults of(List<? extends InferenceResults> results) {
-List<InferenceFloatEmbedding> embeddings = new ArrayList<>(results.size());
+public static TextEmbeddingFloatResults of(List<? extends InferenceResults> results) {
+List<Embedding> embeddings = new ArrayList<>(results.size());
for (InferenceResults result : results) {
if (result instanceof MlTextEmbeddingResults embeddingResult) {
-embeddings.add(InferenceFloatEmbedding.of(embeddingResult));
+embeddings.add(TextEmbeddingFloatResults.Embedding.of(embeddingResult));
} else if (result instanceof org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults errorResult) {
if (errorResult.getException() instanceof ElasticsearchStatusException statusException) {
throw statusException;
@@ -93,12 +94,15 @@ public record InferenceTextEmbeddingFloatResults(List<InferenceFloatEmbedding> e
);
}
}
-return new InferenceTextEmbeddingFloatResults(embeddings);
+return new TextEmbeddingFloatResults(embeddings);
}
@Override
public int getFirstEmbeddingSize() {
-return TextEmbeddingUtils.getFirstEmbeddingSize(new ArrayList<>(embeddings));
+if (embeddings.isEmpty()) {
+throw new IllegalStateException("Embeddings list is empty");
+}
+return embeddings.getFirst().values().length;
}
@Override
@@ -142,7 +146,7 @@ public record InferenceTextEmbeddingFloatResults(List<InferenceFloatEmbedding> e
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
-InferenceTextEmbeddingFloatResults that = (InferenceTextEmbeddingFloatResults) o;
+TextEmbeddingFloatResults that = (TextEmbeddingFloatResults) o;
return Objects.equals(embeddings, that.embeddings);
}
@@ -151,29 +155,24 @@ public record InferenceTextEmbeddingFloatResults(List<InferenceFloatEmbedding> e
return Objects.hash(embeddings);
}
-public record InferenceFloatEmbedding(float[] values) implements Writeable, ToXContentObject, EmbeddingInt {
+public record Embedding(float[] values) implements Writeable, ToXContentObject, EmbeddingResults.Embedding<Chunk> {
public static final String EMBEDDING = "embedding";
-public InferenceFloatEmbedding(StreamInput in) throws IOException {
+public Embedding(StreamInput in) throws IOException {
this(in.readFloatArray());
}
-public static InferenceFloatEmbedding of(MlTextEmbeddingResults embeddingResult) {
+public static Embedding of(MlTextEmbeddingResults embeddingResult) {
float[] embeddingAsArray = embeddingResult.getInferenceAsFloat();
-return new InferenceFloatEmbedding(embeddingAsArray);
+return new Embedding(embeddingAsArray);
}
-public static InferenceFloatEmbedding of(List<Float> embeddingValuesList) {
+public static Embedding of(List<Float> embeddingValuesList) {
float[] embeddingValues = new float[embeddingValuesList.size()];
for (int i = 0; i < embeddingValuesList.size(); i++) {
embeddingValues[i] = embeddingValuesList.get(i);
}
-return new InferenceFloatEmbedding(embeddingValues);
+return new Embedding(embeddingValues);
}
-@Override
-public int getSize() {
-return values.length;
-}
@Override
@@ -212,7 +211,7 @@ public record InferenceTextEmbeddingFloatResults(List<InferenceFloatEmbedding> e
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
-InferenceFloatEmbedding embedding = (InferenceFloatEmbedding) o;
+Embedding embedding = (Embedding) o;
return Arrays.equals(values, embedding.values);
}
@@ -220,5 +219,30 @@ public record InferenceTextEmbeddingFloatResults(List<InferenceFloatEmbedding> e
public int hashCode() {
return Arrays.hashCode(values);
}
+@Override
+public Chunk toChunk(String text, ChunkedInference.TextOffset offset) {
+return new Chunk(values, text, offset);
+}
}
+public record Chunk(float[] embedding, String matchedText, ChunkedInference.TextOffset offset) implements EmbeddingResults.Chunk {
+public ChunkedInference.Chunk toChunk(XContent xcontent) throws IOException {
+return new ChunkedInference.Chunk(matchedText, offset, toBytesReference(xcontent, embedding));
+}
+/**
+* Serialises the {@code value} array, according to the provided {@link XContent}, into a {@link BytesReference}.
+*/
+private static BytesReference toBytesReference(XContent xContent, float[] value) throws IOException {
+XContentBuilder b = XContentBuilder.builder(xContent);
+b.startArray();
+for (float v : value) {
+b.value(v);
+}
+b.endArray();
+return BytesReference.bytes(b);
+}
+}
}

View file

@@ -7,7 +7,9 @@
package org.elasticsearch.xpack.core.inference.results;
-public interface TextEmbedding {
+public interface TextEmbeddingResults<C extends EmbeddingResults.Chunk, E extends EmbeddingResults.Embedding<C>>
+extends
+EmbeddingResults<C, E> {
/**
* Returns the first text embedding entry in the result list's array size.

View file

@@ -13,20 +13,6 @@ import java.util.List;
public class TextEmbeddingUtils {
-/**
-* Returns the first text embedding entry's array size.
-* @param embeddings the list of embeddings
-* @return the size of the text embedding
-* @throws IllegalStateException if the list of embeddings is empty
-*/
-public static int getFirstEmbeddingSize(List<EmbeddingInt> embeddings) throws IllegalStateException {
-if (embeddings.isEmpty()) {
-throw new IllegalStateException("Embeddings list is empty");
-}
-return embeddings.get(0).getSize();
-}
/**
* Throws an exception if the number of elements in the input text list is different than the results in text embedding
* response.

View file

@@ -18,6 +18,7 @@ import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAct
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction;
import org.elasticsearch.action.admin.indices.rollover.RolloverAction;
+import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction;
import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
import org.elasticsearch.action.bulk.TransportBulkAction;
@@ -213,6 +214,7 @@ public class InternalUsers {
TransportCloseIndexAction.NAME,
TransportCreateIndexAction.TYPE.name(),
TransportClusterSearchShardsAction.TYPE.name(),
+GetSettingsAction.NAME,
TransportUpdateSettingsAction.TYPE.name(),
RefreshAction.NAME,
ReindexAction.NAME,

View file

@@ -13,10 +13,10 @@ import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ExecutorBuilder;
import org.elasticsearch.threadpool.TestThreadPool;
@@ -113,25 +113,7 @@ public class AsyncTwoPhaseIndexerTests extends ESTestCase {
return;
}
-ActionListener.respondAndRelease(
-nextPhase,
-new SearchResponse(
-SearchHits.EMPTY_WITH_TOTAL_HITS,
-null,
-null,
-false,
-null,
-null,
-1,
-null,
-1,
-1,
-0,
-0,
-ShardSearchFailure.EMPTY_ARRAY,
-null
-)
-);
+ActionListener.respondAndRelease(nextPhase, SearchResponseUtils.successfulResponse(SearchHits.EMPTY_WITH_TOTAL_HITS));
}
@Override
@@ -264,25 +246,7 @@ public class AsyncTwoPhaseIndexerTests extends ESTestCase {
awaitForLatch();
}
-ActionListener.respondAndRelease(
-nextPhase,
-new SearchResponse(
-SearchHits.EMPTY_WITH_TOTAL_HITS,
-null,
-null,
-false,
-null,
-null,
-1,
-null,
-1,
-1,
-0,
-0,
-ShardSearchFailure.EMPTY_ARRAY,
-null
-)
-);
+ActionListener.respondAndRelease(nextPhase, SearchResponseUtils.successfulResponse(SearchHits.EMPTY_WITH_TOTAL_HITS));
}
@Override

View file

@@ -6,12 +6,12 @@
*/
package org.elasticsearch.xpack.enrich;
-import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
@@ -26,6 +26,7 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
+import static org.elasticsearch.action.support.ActionTestUtils.assertNoFailureListener;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
@@ -122,18 +123,10 @@ public class EnrichCacheTests extends ESTestCase {
searchResponseActionListener.onResponse(searchResponse);
searchResponse.decRef();
queriedDatabaseLatch.countDown();
-}, new ActionListener<>() {
-@Override
-public void onResponse(List<Map<?, ?>> response) {
-assertThat(response, equalTo(searchResponseMap));
-notifiedOfResultLatch.countDown();
-}
-@Override
-public void onFailure(Exception e) {
-fail(e);
-}
-});
+}, assertNoFailureListener(response -> {
+assertThat(response, equalTo(searchResponseMap));
+notifiedOfResultLatch.countDown();
+}));
assertThat(queriedDatabaseLatch.await(5, TimeUnit.SECONDS), equalTo(true));
assertThat(notifiedOfResultLatch.await(5, TimeUnit.SECONDS), equalTo(true));
EnrichStatsAction.Response.CacheStats cacheStats = enrichCache.getStats(randomAlphaOfLength(10));
@@ -149,17 +142,7 @@ public class EnrichCacheTests extends ESTestCase {
CountDownLatch notifiedOfResultLatch = new CountDownLatch(1);
enrichCache.computeIfAbsent("policy1-1", "1", 1, (searchResponseActionListener) -> {
fail("Expected no call to the database because item should have been in the cache");
-}, new ActionListener<>() {
-@Override
-public void onResponse(List<Map<?, ?>> maps) {
-notifiedOfResultLatch.countDown();
-}
-@Override
-public void onFailure(Exception e) {
-fail(e);
-}
-});
+}, assertNoFailureListener(r -> notifiedOfResultLatch.countDown()));
assertThat(notifiedOfResultLatch.await(5, TimeUnit.SECONDS), equalTo(true));
EnrichStatsAction.Response.CacheStats cacheStats = enrichCache.getStats(randomAlphaOfLength(10));
assertThat(cacheStats.count(), equalTo(1L));
@@ -180,22 +163,7 @@ public class EnrichCacheTests extends ESTestCase {
}
}).toArray(SearchHit[]::new);
SearchHits hits = SearchHits.unpooled(hitArray, null, 0);
-return new SearchResponse(
-hits,
-null,
-null,
-false,
-false,
-null,
-1,
-null,
-5,
-4,
-0,
-randomLong(),
-null,
-SearchResponse.Clusters.EMPTY
-);
+return SearchResponseUtils.response(hits).shards(5, 4, 0).build();
}
private BytesReference convertMapToJson(Map<String, ?> simpleMap) throws IOException {
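The listener changes above drop hand-rolled anonymous ActionListener implementations in favour of ActionTestUtils.assertNoFailureListener, which fails the test on onFailure and forwards onResponse to a lambda. A minimal sketch of the pattern, mirroring the first call site in this file:

    ActionListener<List<Map<?, ?>>> listener = assertNoFailureListener(response -> {
        assertThat(response, equalTo(searchResponseMap));
        notifiedOfResultLatch.countDown();
    });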

View file

@@ -11,8 +11,6 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasMetadata;
@@ -25,9 +23,7 @@ import org.elasticsearch.index.VersionType;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchHits;
-import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.profile.SearchProfileResults;
-import org.elasticsearch.search.suggest.Suggest;
+import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.client.NoOpClient;
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
@@ -257,22 +253,7 @@ public class EnrichProcessorFactoryTests extends ESTestCase {
requestCounter[0]++;
ActionListener.respondAndRelease(
listener,
-(Response) new SearchResponse(
-SearchHits.EMPTY_WITH_TOTAL_HITS,
-InternalAggregations.EMPTY,
-new Suggest(Collections.emptyList()),
-false,
-false,
-new SearchProfileResults(Collections.emptyMap()),
-1,
-"",
-1,
-1,
-0,
-0,
-ShardSearchFailure.EMPTY_ARRAY,
-SearchResponse.Clusters.EMPTY
-)
+(Response) SearchResponseUtils.successfulResponse(SearchHits.EMPTY_WITH_TOTAL_HITS)
);
}
};

View file

@@ -7,7 +7,6 @@
package org.elasticsearch.xpack.enrich.action;
import org.apache.logging.log4j.util.BiConsumer;
-import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
@@ -17,13 +16,13 @@ import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.action.support.single.shard.SingleShardRequest;
import org.elasticsearch.client.internal.ElasticsearchClient;
+import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.search.SearchHits;
-import org.elasticsearch.search.aggregations.InternalAggregations;
+import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
@@ -371,22 +370,7 @@ public class CoordinatorTests extends ESTestCase {
}
private static SearchResponse emptySearchResponse() {
-return new SearchResponse(
-SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN),
-InternalAggregations.EMPTY,
-null,
-false,
-null,
-null,
-1,
-null,
-1,
-1,
-0,
-100,
-ShardSearchFailure.EMPTY_ARRAY,
-SearchResponse.Clusters.EMPTY
-);
+return SearchResponseUtils.successfulResponse(SearchHits.empty(Lucene.TOTAL_HITS_EQUAL_TO_ZERO, Float.NaN));
}
private class MockLookupFunction implements BiConsumer<MultiSearchRequest, BiConsumer<MultiSearchResponse, Exception>> {

View file

@@ -12,12 +12,12 @@ import org.apache.lucene.search.TotalHits.Relation;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.SearchResponse.Clusters;
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.search.SearchSortValues;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.ESTestCase;
@@ -82,10 +82,7 @@ public class ImplicitTiebreakerTests extends ESTestCase {
)
);
SearchHits searchHits = SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f);
-ActionListener.respondAndRelease(
-l,
-new SearchResponse(searchHits, null, null, false, false, null, 0, null, 0, 1, 0, 0, null, Clusters.EMPTY)
-);
+ActionListener.respondAndRelease(l, SearchResponseUtils.successfulResponse(searchHits));
}
@Override

View file

@@ -13,7 +13,6 @@ import org.apache.lucene.search.TotalHits;
import org.apache.lucene.search.TotalHits.Relation;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.SearchResponse.Clusters;
import org.elasticsearch.action.support.ActionTestUtils;
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.document.DocumentField;
@@ -22,6 +21,7 @@ import org.elasticsearch.core.TimeValue;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.eql.action.EqlSearchResponse.Sequence;
@@ -220,10 +220,7 @@ public class SequenceSpecTests extends ESTestCase {
new TotalHits(eah.hits.size(), Relation.EQUAL_TO),
0.0f
);
-ActionListener.respondAndRelease(
-l,
-new SearchResponse(searchHits, null, null, false, false, null, 0, null, 0, 1, 0, 0, null, Clusters.EMPTY)
-);
+ActionListener.respondAndRelease(l, SearchResponseUtils.successfulResponse(searchHits));
}
@Override

View file

@ -20,7 +20,6 @@ import org.elasticsearch.action.search.OpenPointInTimeResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchResponse.Clusters;
import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.ActionTestUtils;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
@ -44,6 +43,7 @@ import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.SearchSortValues; import org.elasticsearch.search.SearchSortValues;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
@@ -114,10 +114,7 @@ public class CircuitBreakerTests extends ESTestCase {
new SearchSortValues(new Long[] { (long) ordinal, 1L }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW }) new SearchSortValues(new Long[] { (long) ordinal, 1L }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW })
); );
SearchHits searchHits = SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); SearchHits searchHits = SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f);
ActionListener.respondAndRelease( ActionListener.respondAndRelease(l, SearchResponseUtils.successfulResponse(searchHits));
l,
new SearchResponse(searchHits, null, null, false, false, null, 0, null, 0, 1, 0, 0, null, Clusters.EMPTY)
);
} }
@Override @Override

View file

@@ -221,6 +221,11 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
} }
public void testScaledFloat() throws IOException { public void testScaledFloat() throws IOException {
// Running this on JDK 17 when the cluster nodes run JDK >17 triggers an assertion failure due to a mismatch // Running this on JDK 17 when the cluster nodes run JDK >17 triggers an assertion failure due to a mismatch
// in the results produced by Double#toString for some specific numbers. // in the results produced by Double#toString for some specific numbers.
// See https://github.com/elastic/elasticsearch/issues/122984.
assumeTrue("JDK version greater than 17", Runtime.version().feature() > 17);
double value = randomBoolean() ? randomDoubleBetween(-Double.MAX_VALUE, Double.MAX_VALUE, true) : randomFloat(); double value = randomBoolean() ? randomDoubleBetween(-Double.MAX_VALUE, Double.MAX_VALUE, true) : randomFloat();
// Scale factors less than about 5.6e-309 will result in NaN (due to 1/scaleFactor being infinity) // Scale factors less than about 5.6e-309 will result in NaN (due to 1/scaleFactor being infinity)
double scalingFactor = randomDoubleBetween(1e-308, Double.MAX_VALUE, false); double scalingFactor = randomDoubleBetween(1e-308, Double.MAX_VALUE, false);
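Illustrative sketch (not part of this commit): the new assumeTrue line gates the test on the JDK feature release, because Double#toString output for some values differs between JDK 17 and newer JDKs. Below is a minimal, self-contained Java example of the same version check; the class name and printed strings are hypothetical.

public class VersionGateExample {
    public static void main(String[] args) {
        // Runtime.version().feature() is the JDK feature release, e.g. 17 for JDK 17, 21 for JDK 21.
        Runtime.Version version = Runtime.version();
        if (version.feature() > 17) {
            System.out.println("Would run the scaled_float check on JDK " + version.feature());
        } else {
            // On JDK 17 the Double#toString output for some values differs from newer JDKs,
            // so the test above skips itself via assumeTrue.
            System.out.println("Would skip the scaled_float check on JDK " + version.feature());
        }
    }
}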

View file

@@ -195,14 +195,12 @@ IN: 'in';
IS: 'is'; IS: 'is';
LAST : 'last'; LAST : 'last';
LIKE: 'like'; LIKE: 'like';
LP : '(';
NOT : 'not'; NOT : 'not';
NULL : 'null'; NULL : 'null';
NULLS : 'nulls'; NULLS : 'nulls';
OR : 'or'; OR : 'or';
PARAM: '?'; PARAM: '?';
RLIKE: 'rlike'; RLIKE: 'rlike';
RP : ')';
TRUE : 'true'; TRUE : 'true';
EQ : '=='; EQ : '==';
@@ -223,8 +221,6 @@ LEFT_BRACES : '{';
RIGHT_BRACES : '}'; RIGHT_BRACES : '}';
NESTED_WHERE : WHERE -> type(WHERE); NESTED_WHERE : WHERE -> type(WHERE);
NESTED_SORT : {this.isDevVersion()}? SORT -> type(SORT);
NESTED_LIMIT : {this.isDevVersion()}? LIMIT -> type(LIMIT);
NAMED_OR_POSITIONAL_PARAM NAMED_OR_POSITIONAL_PARAM
: PARAM (LETTER | UNDERSCORE) UNQUOTED_ID_BODY* : PARAM (LETTER | UNDERSCORE) UNQUOTED_ID_BODY*
@@ -239,6 +235,9 @@ NAMED_OR_POSITIONAL_PARAM
OPENING_BRACKET : '[' -> pushMode(EXPRESSION_MODE), pushMode(EXPRESSION_MODE); OPENING_BRACKET : '[' -> pushMode(EXPRESSION_MODE), pushMode(EXPRESSION_MODE);
CLOSING_BRACKET : ']' -> popMode, popMode; CLOSING_BRACKET : ']' -> popMode, popMode;
LP : '(' -> pushMode(EXPRESSION_MODE), pushMode(EXPRESSION_MODE);
RP : ')' -> popMode, popMode;
UNQUOTED_IDENTIFIER UNQUOTED_IDENTIFIER
: LETTER UNQUOTED_ID_BODY* : LETTER UNQUOTED_ID_BODY*
// only allow @ at beginning of identifier to keep the option to allow @ as infix operator in the future // only allow @ at beginning of identifier to keep the option to allow @ as infix operator in the future
@@ -678,8 +677,8 @@ INSIST_MULTILINE_COMMENT : MULTILINE_COMMENT -> channel(HIDDEN);
// //
mode FORK_MODE; mode FORK_MODE;
FORK_LP : LP -> type(LP), pushMode(DEFAULT_MODE); FORK_LP : LP -> type(LP), pushMode(DEFAULT_MODE);
FORK_RP : RP -> type(RP), popMode;
FORK_PIPE : PIPE -> type(PIPE), popMode; FORK_PIPE : PIPE -> type(PIPE), popMode;
FORK_WS : WS -> channel(HIDDEN); FORK_WS : WS -> channel(HIDDEN);
FORK_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN); FORK_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN);
FORK_MULTILINE_COMMENT : MULTILINE_COMMENT -> channel(HIDDEN); FORK_MULTILINE_COMMENT : MULTILINE_COMMENT -> channel(HIDDEN);

View file

@@ -47,32 +47,32 @@ IN=46
IS=47 IS=47
LAST=48 LAST=48
LIKE=49 LIKE=49
LP=50 NOT=50
NOT=51 NULL=51
NULL=52 NULLS=52
NULLS=53 OR=53
OR=54 PARAM=54
PARAM=55 RLIKE=55
RLIKE=56 TRUE=56
RP=57 EQ=57
TRUE=58 CIEQ=58
EQ=59 NEQ=59
CIEQ=60 LT=60
NEQ=61 LTE=61
LT=62 GT=62
LTE=63 GTE=63
GT=64 PLUS=64
GTE=65 MINUS=65
PLUS=66 ASTERISK=66
MINUS=67 SLASH=67
ASTERISK=68 PERCENT=68
SLASH=69 LEFT_BRACES=69
PERCENT=70 RIGHT_BRACES=70
LEFT_BRACES=71 NAMED_OR_POSITIONAL_PARAM=71
RIGHT_BRACES=72 OPENING_BRACKET=72
NAMED_OR_POSITIONAL_PARAM=73 CLOSING_BRACKET=73
OPENING_BRACKET=74 LP=74
CLOSING_BRACKET=75 RP=75
UNQUOTED_IDENTIFIER=76 UNQUOTED_IDENTIFIER=76
QUOTED_IDENTIFIER=77 QUOTED_IDENTIFIER=77
EXPR_LINE_COMMENT=78 EXPR_LINE_COMMENT=78
@@ -173,30 +173,29 @@ FORK_MULTILINE_COMMENT=142
'is'=47 'is'=47
'last'=48 'last'=48
'like'=49 'like'=49
'('=50 'not'=50
'not'=51 'null'=51
'null'=52 'nulls'=52
'nulls'=53 'or'=53
'or'=54 '?'=54
'?'=55 'rlike'=55
'rlike'=56 'true'=56
')'=57 '=='=57
'true'=58 '=~'=58
'=='=59 '!='=59
'=~'=60 '<'=60
'!='=61 '<='=61
'<'=62 '>'=62
'<='=63 '>='=63
'>'=64 '+'=64
'>='=65 '-'=65
'+'=66 '*'=66
'-'=67 '/'=67
'*'=68 '%'=68
'/'=69 '{'=69
'%'=70 '}'=70
'{'=71 ']'=73
'}'=72 ')'=75
']'=75
'metadata'=84 'metadata'=84
'as'=93 'as'=93
'on'=97 'on'=97

View file

@@ -47,32 +47,32 @@ IN=46
IS=47 IS=47
LAST=48 LAST=48
LIKE=49 LIKE=49
LP=50 NOT=50
NOT=51 NULL=51
NULL=52 NULLS=52
NULLS=53 OR=53
OR=54 PARAM=54
PARAM=55 RLIKE=55
RLIKE=56 TRUE=56
RP=57 EQ=57
TRUE=58 CIEQ=58
EQ=59 NEQ=59
CIEQ=60 LT=60
NEQ=61 LTE=61
LT=62 GT=62
LTE=63 GTE=63
GT=64 PLUS=64
GTE=65 MINUS=65
PLUS=66 ASTERISK=66
MINUS=67 SLASH=67
ASTERISK=68 PERCENT=68
SLASH=69 LEFT_BRACES=69
PERCENT=70 RIGHT_BRACES=70
LEFT_BRACES=71 NAMED_OR_POSITIONAL_PARAM=71
RIGHT_BRACES=72 OPENING_BRACKET=72
NAMED_OR_POSITIONAL_PARAM=73 CLOSING_BRACKET=73
OPENING_BRACKET=74 LP=74
CLOSING_BRACKET=75 RP=75
UNQUOTED_IDENTIFIER=76 UNQUOTED_IDENTIFIER=76
QUOTED_IDENTIFIER=77 QUOTED_IDENTIFIER=77
EXPR_LINE_COMMENT=78 EXPR_LINE_COMMENT=78
@@ -173,30 +173,29 @@ FORK_MULTILINE_COMMENT=142
'is'=47 'is'=47
'last'=48 'last'=48
'like'=49 'like'=49
'('=50 'not'=50
'not'=51 'null'=51
'null'=52 'nulls'=52
'nulls'=53 'or'=53
'or'=54 '?'=54
'?'=55 'rlike'=55
'rlike'=56 'true'=56
')'=57 '=='=57
'true'=58 '=~'=58
'=='=59 '!='=59
'=~'=60 '<'=60
'!='=61 '<='=61
'<'=62 '>'=62
'<='=63 '>='=63
'>'=64 '+'=64
'>='=65 '-'=65
'+'=66 '*'=66
'-'=67 '/'=67
'*'=68 '%'=68
'/'=69 '{'=69
'%'=70 '}'=70
'{'=71 ']'=73
'}'=72 ')'=75
']'=75
'metadata'=84 'metadata'=84
'as'=93 'as'=93
'on'=97 'on'=97

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@@ -32,21 +32,21 @@ public class EsqlBaseParser extends ParserConfig {
DEV_FORK=26, UNKNOWN_CMD=27, LINE_COMMENT=28, MULTILINE_COMMENT=29, WS=30, DEV_FORK=26, UNKNOWN_CMD=27, LINE_COMMENT=28, MULTILINE_COMMENT=29, WS=30,
PIPE=31, QUOTED_STRING=32, INTEGER_LITERAL=33, DECIMAL_LITERAL=34, BY=35, PIPE=31, QUOTED_STRING=32, INTEGER_LITERAL=33, DECIMAL_LITERAL=34, BY=35,
AND=36, ASC=37, ASSIGN=38, CAST_OP=39, COLON=40, COMMA=41, DESC=42, DOT=43, AND=36, ASC=37, ASSIGN=38, CAST_OP=39, COLON=40, COMMA=41, DESC=42, DOT=43,
FALSE=44, FIRST=45, IN=46, IS=47, LAST=48, LIKE=49, LP=50, NOT=51, NULL=52, FALSE=44, FIRST=45, IN=46, IS=47, LAST=48, LIKE=49, NOT=50, NULL=51, NULLS=52,
NULLS=53, OR=54, PARAM=55, RLIKE=56, RP=57, TRUE=58, EQ=59, CIEQ=60, NEQ=61, OR=53, PARAM=54, RLIKE=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, LTE=61,
LT=62, LTE=63, GT=64, GTE=65, PLUS=66, MINUS=67, ASTERISK=68, SLASH=69, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, LEFT_BRACES=69,
PERCENT=70, LEFT_BRACES=71, RIGHT_BRACES=72, NAMED_OR_POSITIONAL_PARAM=73, RIGHT_BRACES=70, NAMED_OR_POSITIONAL_PARAM=71, OPENING_BRACKET=72, CLOSING_BRACKET=73,
OPENING_BRACKET=74, CLOSING_BRACKET=75, UNQUOTED_IDENTIFIER=76, QUOTED_IDENTIFIER=77, LP=74, RP=75, UNQUOTED_IDENTIFIER=76, QUOTED_IDENTIFIER=77, EXPR_LINE_COMMENT=78,
EXPR_LINE_COMMENT=78, EXPR_MULTILINE_COMMENT=79, EXPR_WS=80, EXPLAIN_WS=81, EXPR_MULTILINE_COMMENT=79, EXPR_WS=80, EXPLAIN_WS=81, EXPLAIN_LINE_COMMENT=82,
EXPLAIN_LINE_COMMENT=82, EXPLAIN_MULTILINE_COMMENT=83, METADATA=84, UNQUOTED_SOURCE=85, EXPLAIN_MULTILINE_COMMENT=83, METADATA=84, UNQUOTED_SOURCE=85, FROM_LINE_COMMENT=86,
FROM_LINE_COMMENT=86, FROM_MULTILINE_COMMENT=87, FROM_WS=88, ID_PATTERN=89, FROM_MULTILINE_COMMENT=87, FROM_WS=88, ID_PATTERN=89, PROJECT_LINE_COMMENT=90,
PROJECT_LINE_COMMENT=90, PROJECT_MULTILINE_COMMENT=91, PROJECT_WS=92, PROJECT_MULTILINE_COMMENT=91, PROJECT_WS=92, AS=93, RENAME_LINE_COMMENT=94,
AS=93, RENAME_LINE_COMMENT=94, RENAME_MULTILINE_COMMENT=95, RENAME_WS=96, RENAME_MULTILINE_COMMENT=95, RENAME_WS=96, ON=97, WITH=98, ENRICH_POLICY_NAME=99,
ON=97, WITH=98, ENRICH_POLICY_NAME=99, ENRICH_LINE_COMMENT=100, ENRICH_MULTILINE_COMMENT=101, ENRICH_LINE_COMMENT=100, ENRICH_MULTILINE_COMMENT=101, ENRICH_WS=102,
ENRICH_WS=102, ENRICH_FIELD_LINE_COMMENT=103, ENRICH_FIELD_MULTILINE_COMMENT=104, ENRICH_FIELD_LINE_COMMENT=103, ENRICH_FIELD_MULTILINE_COMMENT=104, ENRICH_FIELD_WS=105,
ENRICH_FIELD_WS=105, MVEXPAND_LINE_COMMENT=106, MVEXPAND_MULTILINE_COMMENT=107, MVEXPAND_LINE_COMMENT=106, MVEXPAND_MULTILINE_COMMENT=107, MVEXPAND_WS=108,
MVEXPAND_WS=108, INFO=109, SHOW_LINE_COMMENT=110, SHOW_MULTILINE_COMMENT=111, INFO=109, SHOW_LINE_COMMENT=110, SHOW_MULTILINE_COMMENT=111, SHOW_WS=112,
SHOW_WS=112, SETTING=113, SETTING_LINE_COMMENT=114, SETTTING_MULTILINE_COMMENT=115, SETTING=113, SETTING_LINE_COMMENT=114, SETTTING_MULTILINE_COMMENT=115,
SETTING_WS=116, LOOKUP_LINE_COMMENT=117, LOOKUP_MULTILINE_COMMENT=118, SETTING_WS=116, LOOKUP_LINE_COMMENT=117, LOOKUP_MULTILINE_COMMENT=118,
LOOKUP_WS=119, LOOKUP_FIELD_LINE_COMMENT=120, LOOKUP_FIELD_MULTILINE_COMMENT=121, LOOKUP_WS=119, LOOKUP_FIELD_LINE_COMMENT=120, LOOKUP_FIELD_MULTILINE_COMMENT=121,
LOOKUP_FIELD_WS=122, JOIN=123, USING=124, JOIN_LINE_COMMENT=125, JOIN_MULTILINE_COMMENT=126, LOOKUP_FIELD_WS=122, JOIN=123, USING=124, JOIN_LINE_COMMENT=125, JOIN_MULTILINE_COMMENT=126,
@@ -108,10 +108,10 @@ public class EsqlBaseParser extends ParserConfig {
"'sort'", "'stats'", "'where'", "'lookup'", null, null, null, null, null, "'sort'", "'stats'", "'where'", "'lookup'", null, null, null, null, null,
null, null, null, null, null, null, null, null, "'|'", null, null, null, null, null, null, null, null, null, null, null, "'|'", null, null, null,
"'by'", "'and'", "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'by'", "'and'", "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'",
"'false'", "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", "'not'", "'null'",
"'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'nulls'", "'or'", "'?'", "'rlike'", "'true'", "'=='", "'=~'", "'!='",
"'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'{'",
"'%'", "'{'", "'}'", null, null, "']'", null, null, null, null, null, "'}'", null, null, "']'", null, "')'", null, null, null, null, null,
null, null, null, "'metadata'", null, null, null, null, null, null, null, null, null, null, "'metadata'", null, null, null, null, null, null, null,
null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null,
null, null, null, null, null, null, "'info'", null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, null,
@@ -128,17 +128,17 @@ public class EsqlBaseParser extends ParserConfig {
"DEV_FORK", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "DEV_FORK", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS",
"PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY",
"AND", "ASC", "ASSIGN", "CAST_OP", "COLON", "COMMA", "DESC", "DOT", "FALSE", "AND", "ASC", "ASSIGN", "CAST_OP", "COLON", "COMMA", "DESC", "DOT", "FALSE",
"FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "FIRST", "IN", "IS", "LAST", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM",
"PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "RLIKE", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS",
"GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "LEFT_BRACES", "MINUS", "ASTERISK", "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES",
"RIGHT_BRACES", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", "LP",
"UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "RP", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT",
"EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT",
"METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT",
"FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT",
"PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT",
"RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME",
"ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT",
"ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT",
"MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT",
"SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT",
@@ -1557,7 +1557,7 @@ public class EsqlBaseParser extends ParserConfig {
setState(276); setState(276);
((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1);
_la = _input.LA(1); _la = _input.LA(1);
if ( !(((((_la - 68)) & ~0x3f) == 0 && ((1L << (_la - 68)) & 7L) != 0)) ) { if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & 7L) != 0)) ) {
((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this);
} }
else { else {
@@ -1901,7 +1901,6 @@ public class EsqlBaseParser extends ParserConfig {
case INTEGER_LITERAL: case INTEGER_LITERAL:
case DECIMAL_LITERAL: case DECIMAL_LITERAL:
case FALSE: case FALSE:
case LP:
case NOT: case NOT:
case NULL: case NULL:
case PARAM: case PARAM:
@@ -1910,6 +1909,7 @@ public class EsqlBaseParser extends ParserConfig {
case MINUS: case MINUS:
case NAMED_OR_POSITIONAL_PARAM: case NAMED_OR_POSITIONAL_PARAM:
case OPENING_BRACKET: case OPENING_BRACKET:
case LP:
case UNQUOTED_IDENTIFIER: case UNQUOTED_IDENTIFIER:
case QUOTED_IDENTIFIER: case QUOTED_IDENTIFIER:
{ {
@@ -5055,7 +5055,7 @@ public class EsqlBaseParser extends ParserConfig {
{ {
setState(596); setState(596);
_la = _input.LA(1); _la = _input.LA(1);
if ( !(((((_la - 59)) & ~0x3f) == 0 && ((1L << (_la - 59)) & 125L) != 0)) ) { if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -432345564227567616L) != 0)) ) {
_errHandler.recoverInline(this); _errHandler.recoverInline(this);
} }
else { else {
@@ -6449,8 +6449,8 @@ public class EsqlBaseParser extends ParserConfig {
"\u0002\n\u0012\u0014\u008eI\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ "\u0002\n\u0012\u0014\u008eI\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+
"\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+
"TVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086\u0088\u008a\u008c\u008e"+ "TVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086\u0088\u008a\u008c\u008e"+
"\u0090\u0000\t\u0001\u0000BC\u0001\u0000DF\u0002\u0000 UU\u0001\u0000"+ "\u0090\u0000\t\u0001\u0000@A\u0001\u0000BD\u0002\u0000 UU\u0001\u0000"+
"LM\u0002\u0000%%**\u0002\u0000--00\u0002\u0000,,::\u0002\u0000;;=A\u0002"+ "LM\u0002\u0000%%**\u0002\u0000--00\u0002\u0000,,88\u0002\u000099;?\u0002"+
"\u0000\u0011\u0011\u0018\u0019\u02dd\u0000\u0092\u0001\u0000\u0000\u0000"+ "\u0000\u0011\u0011\u0018\u0019\u02dd\u0000\u0092\u0001\u0000\u0000\u0000"+
"\u0002\u0095\u0001\u0000\u0000\u0000\u0004\u00a6\u0001\u0000\u0000\u0000"+ "\u0002\u0095\u0001\u0000\u0000\u0000\u0004\u00a6\u0001\u0000\u0000\u0000"+
"\u0006\u00bf\u0001\u0000\u0000\u0000\b\u00c1\u0001\u0000\u0000\u0000\n"+ "\u0006\u00bf\u0001\u0000\u0000\u0000\b\u00c1\u0001\u0000\u0000\u0000\n"+
@@ -6518,38 +6518,38 @@ public class EsqlBaseParser extends ParserConfig {
"\u00bf\u00bb\u0001\u0000\u0000\u0000\u00bf\u00bd\u0001\u0000\u0000\u0000"+ "\u00bf\u00bb\u0001\u0000\u0000\u0000\u00bf\u00bd\u0001\u0000\u0000\u0000"+
"\u00c0\u0007\u0001\u0000\u0000\u0000\u00c1\u00c2\u0005\u0010\u0000\u0000"+ "\u00c0\u0007\u0001\u0000\u0000\u0000\u00c1\u00c2\u0005\u0010\u0000\u0000"+
"\u00c2\u00c3\u0003\n\u0005\u0000\u00c3\t\u0001\u0000\u0000\u0000\u00c4"+ "\u00c2\u00c3\u0003\n\u0005\u0000\u00c3\t\u0001\u0000\u0000\u0000\u00c4"+
"\u00c5\u0006\u0005\uffff\uffff\u0000\u00c5\u00c6\u00053\u0000\u0000\u00c6"+ "\u00c5\u0006\u0005\uffff\uffff\u0000\u00c5\u00c6\u00052\u0000\u0000\u00c6"+
"\u00e2\u0003\n\u0005\b\u00c7\u00e2\u0003\u0010\b\u0000\u00c8\u00e2\u0003"+ "\u00e2\u0003\n\u0005\b\u00c7\u00e2\u0003\u0010\b\u0000\u00c8\u00e2\u0003"+
"\f\u0006\u0000\u00c9\u00cb\u0003\u0010\b\u0000\u00ca\u00cc\u00053\u0000"+ "\f\u0006\u0000\u00c9\u00cb\u0003\u0010\b\u0000\u00ca\u00cc\u00052\u0000"+
"\u0000\u00cb\u00ca\u0001\u0000\u0000\u0000\u00cb\u00cc\u0001\u0000\u0000"+ "\u0000\u00cb\u00ca\u0001\u0000\u0000\u0000\u00cb\u00cc\u0001\u0000\u0000"+
"\u0000\u00cc\u00cd\u0001\u0000\u0000\u0000\u00cd\u00ce\u0005.\u0000\u0000"+ "\u0000\u00cc\u00cd\u0001\u0000\u0000\u0000\u00cd\u00ce\u0005.\u0000\u0000"+
"\u00ce\u00cf\u00052\u0000\u0000\u00cf\u00d4\u0003\u0010\b\u0000\u00d0"+ "\u00ce\u00cf\u0005J\u0000\u0000\u00cf\u00d4\u0003\u0010\b\u0000\u00d0"+
"\u00d1\u0005)\u0000\u0000\u00d1\u00d3\u0003\u0010\b\u0000\u00d2\u00d0"+ "\u00d1\u0005)\u0000\u0000\u00d1\u00d3\u0003\u0010\b\u0000\u00d2\u00d0"+
"\u0001\u0000\u0000\u0000\u00d3\u00d6\u0001\u0000\u0000\u0000\u00d4\u00d2"+ "\u0001\u0000\u0000\u0000\u00d3\u00d6\u0001\u0000\u0000\u0000\u00d4\u00d2"+
"\u0001\u0000\u0000\u0000\u00d4\u00d5\u0001\u0000\u0000\u0000\u00d5\u00d7"+ "\u0001\u0000\u0000\u0000\u00d4\u00d5\u0001\u0000\u0000\u0000\u00d5\u00d7"+
"\u0001\u0000\u0000\u0000\u00d6\u00d4\u0001\u0000\u0000\u0000\u00d7\u00d8"+ "\u0001\u0000\u0000\u0000\u00d6\u00d4\u0001\u0000\u0000\u0000\u00d7\u00d8"+
"\u00059\u0000\u0000\u00d8\u00e2\u0001\u0000\u0000\u0000\u00d9\u00da\u0003"+ "\u0005K\u0000\u0000\u00d8\u00e2\u0001\u0000\u0000\u0000\u00d9\u00da\u0003"+
"\u0010\b\u0000\u00da\u00dc\u0005/\u0000\u0000\u00db\u00dd\u00053\u0000"+ "\u0010\b\u0000\u00da\u00dc\u0005/\u0000\u0000\u00db\u00dd\u00052\u0000"+
"\u0000\u00dc\u00db\u0001\u0000\u0000\u0000\u00dc\u00dd\u0001\u0000\u0000"+ "\u0000\u00dc\u00db\u0001\u0000\u0000\u0000\u00dc\u00dd\u0001\u0000\u0000"+
"\u0000\u00dd\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u00054\u0000\u0000"+ "\u0000\u00dd\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u00053\u0000\u0000"+
"\u00df\u00e2\u0001\u0000\u0000\u0000\u00e0\u00e2\u0003\u000e\u0007\u0000"+ "\u00df\u00e2\u0001\u0000\u0000\u0000\u00e0\u00e2\u0003\u000e\u0007\u0000"+
"\u00e1\u00c4\u0001\u0000\u0000\u0000\u00e1\u00c7\u0001\u0000\u0000\u0000"+ "\u00e1\u00c4\u0001\u0000\u0000\u0000\u00e1\u00c7\u0001\u0000\u0000\u0000"+
"\u00e1\u00c8\u0001\u0000\u0000\u0000\u00e1\u00c9\u0001\u0000\u0000\u0000"+ "\u00e1\u00c8\u0001\u0000\u0000\u0000\u00e1\u00c9\u0001\u0000\u0000\u0000"+
"\u00e1\u00d9\u0001\u0000\u0000\u0000\u00e1\u00e0\u0001\u0000\u0000\u0000"+ "\u00e1\u00d9\u0001\u0000\u0000\u0000\u00e1\u00e0\u0001\u0000\u0000\u0000"+
"\u00e2\u00eb\u0001\u0000\u0000\u0000\u00e3\u00e4\n\u0005\u0000\u0000\u00e4"+ "\u00e2\u00eb\u0001\u0000\u0000\u0000\u00e3\u00e4\n\u0005\u0000\u0000\u00e4"+
"\u00e5\u0005$\u0000\u0000\u00e5\u00ea\u0003\n\u0005\u0006\u00e6\u00e7"+ "\u00e5\u0005$\u0000\u0000\u00e5\u00ea\u0003\n\u0005\u0006\u00e6\u00e7"+
"\n\u0004\u0000\u0000\u00e7\u00e8\u00056\u0000\u0000\u00e8\u00ea\u0003"+ "\n\u0004\u0000\u0000\u00e7\u00e8\u00055\u0000\u0000\u00e8\u00ea\u0003"+
"\n\u0005\u0005\u00e9\u00e3\u0001\u0000\u0000\u0000\u00e9\u00e6\u0001\u0000"+ "\n\u0005\u0005\u00e9\u00e3\u0001\u0000\u0000\u0000\u00e9\u00e6\u0001\u0000"+
"\u0000\u0000\u00ea\u00ed\u0001\u0000\u0000\u0000\u00eb\u00e9\u0001\u0000"+ "\u0000\u0000\u00ea\u00ed\u0001\u0000\u0000\u0000\u00eb\u00e9\u0001\u0000"+
"\u0000\u0000\u00eb\u00ec\u0001\u0000\u0000\u0000\u00ec\u000b\u0001\u0000"+ "\u0000\u0000\u00eb\u00ec\u0001\u0000\u0000\u0000\u00ec\u000b\u0001\u0000"+
"\u0000\u0000\u00ed\u00eb\u0001\u0000\u0000\u0000\u00ee\u00f0\u0003\u0010"+ "\u0000\u0000\u00ed\u00eb\u0001\u0000\u0000\u0000\u00ee\u00f0\u0003\u0010"+
"\b\u0000\u00ef\u00f1\u00053\u0000\u0000\u00f0\u00ef\u0001\u0000\u0000"+ "\b\u0000\u00ef\u00f1\u00052\u0000\u0000\u00f0\u00ef\u0001\u0000\u0000"+
"\u0000\u00f0\u00f1\u0001\u0000\u0000\u0000\u00f1\u00f2\u0001\u0000\u0000"+ "\u0000\u00f0\u00f1\u0001\u0000\u0000\u0000\u00f1\u00f2\u0001\u0000\u0000"+
"\u0000\u00f2\u00f3\u00051\u0000\u0000\u00f3\u00f4\u0003j5\u0000\u00f4"+ "\u0000\u00f2\u00f3\u00051\u0000\u0000\u00f3\u00f4\u0003j5\u0000\u00f4"+
"\u00fd\u0001\u0000\u0000\u0000\u00f5\u00f7\u0003\u0010\b\u0000\u00f6\u00f8"+ "\u00fd\u0001\u0000\u0000\u0000\u00f5\u00f7\u0003\u0010\b\u0000\u00f6\u00f8"+
"\u00053\u0000\u0000\u00f7\u00f6\u0001\u0000\u0000\u0000\u00f7\u00f8\u0001"+ "\u00052\u0000\u0000\u00f7\u00f6\u0001\u0000\u0000\u0000\u00f7\u00f8\u0001"+
"\u0000\u0000\u0000\u00f8\u00f9\u0001\u0000\u0000\u0000\u00f9\u00fa\u0005"+ "\u0000\u0000\u0000\u00f8\u00f9\u0001\u0000\u0000\u0000\u00f9\u00fa\u0005"+
"8\u0000\u0000\u00fa\u00fb\u0003j5\u0000\u00fb\u00fd\u0001\u0000\u0000"+ "7\u0000\u0000\u00fa\u00fb\u0003j5\u0000\u00fb\u00fd\u0001\u0000\u0000"+
"\u0000\u00fc\u00ee\u0001\u0000\u0000\u0000\u00fc\u00f5\u0001\u0000\u0000"+ "\u0000\u00fc\u00ee\u0001\u0000\u0000\u0000\u00fc\u00f5\u0001\u0000\u0000"+
"\u0000\u00fd\r\u0001\u0000\u0000\u0000\u00fe\u0101\u0003:\u001d\u0000"+ "\u0000\u00fd\r\u0001\u0000\u0000\u0000\u00fe\u0101\u0003:\u001d\u0000"+
"\u00ff\u0100\u0005\'\u0000\u0000\u0100\u0102\u0003\u001e\u000f\u0000\u0101"+ "\u00ff\u0100\u0005\'\u0000\u0000\u0100\u0102\u0003\u001e\u000f\u0000\u0101"+
@@ -6571,8 +6571,8 @@ public class EsqlBaseParser extends ParserConfig {
"\u011c\u0013\u0001\u0000\u0000\u0000\u011d\u011b\u0001\u0000\u0000\u0000"+ "\u011c\u0013\u0001\u0000\u0000\u0000\u011d\u011b\u0001\u0000\u0000\u0000"+
"\u011e\u011f\u0006\n\uffff\uffff\u0000\u011f\u0127\u0003D\"\u0000\u0120"+ "\u011e\u011f\u0006\n\uffff\uffff\u0000\u011f\u0127\u0003D\"\u0000\u0120"+
"\u0127\u0003:\u001d\u0000\u0121\u0127\u0003\u0016\u000b\u0000\u0122\u0123"+ "\u0127\u0003:\u001d\u0000\u0121\u0127\u0003\u0016\u000b\u0000\u0122\u0123"+
"\u00052\u0000\u0000\u0123\u0124\u0003\n\u0005\u0000\u0124\u0125\u0005"+ "\u0005J\u0000\u0000\u0123\u0124\u0003\n\u0005\u0000\u0124\u0125\u0005"+
"9\u0000\u0000\u0125\u0127\u0001\u0000\u0000\u0000\u0126\u011e\u0001\u0000"+ "K\u0000\u0000\u0125\u0127\u0001\u0000\u0000\u0000\u0126\u011e\u0001\u0000"+
"\u0000\u0000\u0126\u0120\u0001\u0000\u0000\u0000\u0126\u0121\u0001\u0000"+ "\u0000\u0000\u0126\u0120\u0001\u0000\u0000\u0000\u0126\u0121\u0001\u0000"+
"\u0000\u0000\u0126\u0122\u0001\u0000\u0000\u0000\u0127\u012d\u0001\u0000"+ "\u0000\u0000\u0126\u0122\u0001\u0000\u0000\u0000\u0127\u012d\u0001\u0000"+
"\u0000\u0000\u0128\u0129\n\u0001\u0000\u0000\u0129\u012a\u0005\'\u0000"+ "\u0000\u0000\u0128\u0129\n\u0001\u0000\u0000\u0129\u012a\u0005\'\u0000"+
@@ -6580,7 +6580,7 @@ public class EsqlBaseParser extends ParserConfig {
"\u0000\u012c\u012f\u0001\u0000\u0000\u0000\u012d\u012b\u0001\u0000\u0000"+ "\u0000\u012c\u012f\u0001\u0000\u0000\u0000\u012d\u012b\u0001\u0000\u0000"+
"\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\u0015\u0001\u0000\u0000"+ "\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\u0015\u0001\u0000\u0000"+
"\u0000\u012f\u012d\u0001\u0000\u0000\u0000\u0130\u0131\u0003\u0018\f\u0000"+ "\u0000\u012f\u012d\u0001\u0000\u0000\u0000\u0130\u0131\u0003\u0018\f\u0000"+
"\u0131\u013f\u00052\u0000\u0000\u0132\u0140\u0005D\u0000\u0000\u0133\u0138"+ "\u0131\u013f\u0005J\u0000\u0000\u0132\u0140\u0005B\u0000\u0000\u0133\u0138"+
"\u0003\n\u0005\u0000\u0134\u0135\u0005)\u0000\u0000\u0135\u0137\u0003"+ "\u0003\n\u0005\u0000\u0134\u0135\u0005)\u0000\u0000\u0135\u0137\u0003"+
"\n\u0005\u0000\u0136\u0134\u0001\u0000\u0000\u0000\u0137\u013a\u0001\u0000"+ "\n\u0005\u0000\u0136\u0134\u0001\u0000\u0000\u0000\u0137\u013a\u0001\u0000"+
"\u0000\u0000\u0138\u0136\u0001\u0000\u0000\u0000\u0138\u0139\u0001\u0000"+ "\u0000\u0000\u0138\u0136\u0001\u0000\u0000\u0000\u0138\u0139\u0001\u0000"+
@@ -6589,14 +6589,14 @@ public class EsqlBaseParser extends ParserConfig {
"\u0000\u013d\u013b\u0001\u0000\u0000\u0000\u013d\u013e\u0001\u0000\u0000"+ "\u0000\u013d\u013b\u0001\u0000\u0000\u0000\u013d\u013e\u0001\u0000\u0000"+
"\u0000\u013e\u0140\u0001\u0000\u0000\u0000\u013f\u0132\u0001\u0000\u0000"+ "\u0000\u013e\u0140\u0001\u0000\u0000\u0000\u013f\u0132\u0001\u0000\u0000"+
"\u0000\u013f\u0133\u0001\u0000\u0000\u0000\u013f\u0140\u0001\u0000\u0000"+ "\u0000\u013f\u0133\u0001\u0000\u0000\u0000\u013f\u0140\u0001\u0000\u0000"+
"\u0000\u0140\u0141\u0001\u0000\u0000\u0000\u0141\u0142\u00059\u0000\u0000"+ "\u0000\u0140\u0141\u0001\u0000\u0000\u0000\u0141\u0142\u0005K\u0000\u0000"+
"\u0142\u0017\u0001\u0000\u0000\u0000\u0143\u0144\u0003H$\u0000\u0144\u0019"+ "\u0142\u0017\u0001\u0000\u0000\u0000\u0143\u0144\u0003H$\u0000\u0144\u0019"+
"\u0001\u0000\u0000\u0000\u0145\u0146\u0005G\u0000\u0000\u0146\u014b\u0003"+ "\u0001\u0000\u0000\u0000\u0145\u0146\u0005E\u0000\u0000\u0146\u014b\u0003"+
"\u001c\u000e\u0000\u0147\u0148\u0005)\u0000\u0000\u0148\u014a\u0003\u001c"+ "\u001c\u000e\u0000\u0147\u0148\u0005)\u0000\u0000\u0148\u014a\u0003\u001c"+
"\u000e\u0000\u0149\u0147\u0001\u0000\u0000\u0000\u014a\u014d\u0001\u0000"+ "\u000e\u0000\u0149\u0147\u0001\u0000\u0000\u0000\u014a\u014d\u0001\u0000"+
"\u0000\u0000\u014b\u0149\u0001\u0000\u0000\u0000\u014b\u014c\u0001\u0000"+ "\u0000\u0000\u014b\u0149\u0001\u0000\u0000\u0000\u014b\u014c\u0001\u0000"+
"\u0000\u0000\u014c\u014e\u0001\u0000\u0000\u0000\u014d\u014b\u0001\u0000"+ "\u0000\u0000\u014c\u014e\u0001\u0000\u0000\u0000\u014d\u014b\u0001\u0000"+
"\u0000\u0000\u014e\u014f\u0005H\u0000\u0000\u014f\u001b\u0001\u0000\u0000"+ "\u0000\u0000\u014e\u014f\u0005F\u0000\u0000\u014f\u001b\u0001\u0000\u0000"+
"\u0000\u0150\u0151\u0003j5\u0000\u0151\u0152\u0005(\u0000\u0000\u0152"+ "\u0000\u0150\u0151\u0003j5\u0000\u0151\u0152\u0005(\u0000\u0000\u0152"+
"\u0153\u0003D\"\u0000\u0153\u001d\u0001\u0000\u0000\u0000\u0154\u0155"+ "\u0153\u0003D\"\u0000\u0153\u001d\u0001\u0000\u0000\u0000\u0154\u0155"+
"\u0003@ \u0000\u0155\u001f\u0001\u0000\u0000\u0000\u0156\u0157\u0005\f"+ "\u0003@ \u0000\u0155\u001f\u0001\u0000\u0000\u0000\u0156\u0157\u0005\f"+
@@ -6663,33 +6663,33 @@ public class EsqlBaseParser extends ParserConfig {
"\u0000\u01c8\u01c9\u0007\u0003\u0000\u0000\u01c9A\u0001\u0000\u0000\u0000"+ "\u0000\u01c8\u01c9\u0007\u0003\u0000\u0000\u01c9A\u0001\u0000\u0000\u0000"+
"\u01ca\u01cd\u0005Y\u0000\u0000\u01cb\u01cd\u0003F#\u0000\u01cc\u01ca"+ "\u01ca\u01cd\u0005Y\u0000\u0000\u01cb\u01cd\u0003F#\u0000\u01cc\u01ca"+
"\u0001\u0000\u0000\u0000\u01cc\u01cb\u0001\u0000\u0000\u0000\u01cdC\u0001"+ "\u0001\u0000\u0000\u0000\u01cc\u01cb\u0001\u0000\u0000\u0000\u01cdC\u0001"+
"\u0000\u0000\u0000\u01ce\u01f9\u00054\u0000\u0000\u01cf\u01d0\u0003h4"+ "\u0000\u0000\u0000\u01ce\u01f9\u00053\u0000\u0000\u01cf\u01d0\u0003h4"+
"\u0000\u01d0\u01d1\u0005L\u0000\u0000\u01d1\u01f9\u0001\u0000\u0000\u0000"+ "\u0000\u01d0\u01d1\u0005L\u0000\u0000\u01d1\u01f9\u0001\u0000\u0000\u0000"+
"\u01d2\u01f9\u0003f3\u0000\u01d3\u01f9\u0003h4\u0000\u01d4\u01f9\u0003"+ "\u01d2\u01f9\u0003f3\u0000\u01d3\u01f9\u0003h4\u0000\u01d4\u01f9\u0003"+
"b1\u0000\u01d5\u01f9\u0003F#\u0000\u01d6\u01f9\u0003j5\u0000\u01d7\u01d8"+ "b1\u0000\u01d5\u01f9\u0003F#\u0000\u01d6\u01f9\u0003j5\u0000\u01d7\u01d8"+
"\u0005J\u0000\u0000\u01d8\u01dd\u0003d2\u0000\u01d9\u01da\u0005)\u0000"+ "\u0005H\u0000\u0000\u01d8\u01dd\u0003d2\u0000\u01d9\u01da\u0005)\u0000"+
"\u0000\u01da\u01dc\u0003d2\u0000\u01db\u01d9\u0001\u0000\u0000\u0000\u01dc"+ "\u0000\u01da\u01dc\u0003d2\u0000\u01db\u01d9\u0001\u0000\u0000\u0000\u01dc"+
"\u01df\u0001\u0000\u0000\u0000\u01dd\u01db\u0001\u0000\u0000\u0000\u01dd"+ "\u01df\u0001\u0000\u0000\u0000\u01dd\u01db\u0001\u0000\u0000\u0000\u01dd"+
"\u01de\u0001\u0000\u0000\u0000\u01de\u01e0\u0001\u0000\u0000\u0000\u01df"+ "\u01de\u0001\u0000\u0000\u0000\u01de\u01e0\u0001\u0000\u0000\u0000\u01df"+
"\u01dd\u0001\u0000\u0000\u0000\u01e0\u01e1\u0005K\u0000\u0000\u01e1\u01f9"+ "\u01dd\u0001\u0000\u0000\u0000\u01e0\u01e1\u0005I\u0000\u0000\u01e1\u01f9"+
"\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005J\u0000\u0000\u01e3\u01e8\u0003"+ "\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005H\u0000\u0000\u01e3\u01e8\u0003"+
"b1\u0000\u01e4\u01e5\u0005)\u0000\u0000\u01e5\u01e7\u0003b1\u0000\u01e6"+ "b1\u0000\u01e4\u01e5\u0005)\u0000\u0000\u01e5\u01e7\u0003b1\u0000\u01e6"+
"\u01e4\u0001\u0000\u0000\u0000\u01e7\u01ea\u0001\u0000\u0000\u0000\u01e8"+ "\u01e4\u0001\u0000\u0000\u0000\u01e7\u01ea\u0001\u0000\u0000\u0000\u01e8"+
"\u01e6\u0001\u0000\u0000\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9"+ "\u01e6\u0001\u0000\u0000\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9"+
"\u01eb\u0001\u0000\u0000\u0000\u01ea\u01e8\u0001\u0000\u0000\u0000\u01eb"+ "\u01eb\u0001\u0000\u0000\u0000\u01ea\u01e8\u0001\u0000\u0000\u0000\u01eb"+
"\u01ec\u0005K\u0000\u0000\u01ec\u01f9\u0001\u0000\u0000\u0000\u01ed\u01ee"+ "\u01ec\u0005I\u0000\u0000\u01ec\u01f9\u0001\u0000\u0000\u0000\u01ed\u01ee"+
"\u0005J\u0000\u0000\u01ee\u01f3\u0003j5\u0000\u01ef\u01f0\u0005)\u0000"+ "\u0005H\u0000\u0000\u01ee\u01f3\u0003j5\u0000\u01ef\u01f0\u0005)\u0000"+
"\u0000\u01f0\u01f2\u0003j5\u0000\u01f1\u01ef\u0001\u0000\u0000\u0000\u01f2"+ "\u0000\u01f0\u01f2\u0003j5\u0000\u01f1\u01ef\u0001\u0000\u0000\u0000\u01f2"+
"\u01f5\u0001\u0000\u0000\u0000\u01f3\u01f1\u0001\u0000\u0000\u0000\u01f3"+ "\u01f5\u0001\u0000\u0000\u0000\u01f3\u01f1\u0001\u0000\u0000\u0000\u01f3"+
"\u01f4\u0001\u0000\u0000\u0000\u01f4\u01f6\u0001\u0000\u0000\u0000\u01f5"+ "\u01f4\u0001\u0000\u0000\u0000\u01f4\u01f6\u0001\u0000\u0000\u0000\u01f5"+
"\u01f3\u0001\u0000\u0000\u0000\u01f6\u01f7\u0005K\u0000\u0000\u01f7\u01f9"+ "\u01f3\u0001\u0000\u0000\u0000\u01f6\u01f7\u0005I\u0000\u0000\u01f7\u01f9"+
"\u0001\u0000\u0000\u0000\u01f8\u01ce\u0001\u0000\u0000\u0000\u01f8\u01cf"+ "\u0001\u0000\u0000\u0000\u01f8\u01ce\u0001\u0000\u0000\u0000\u01f8\u01cf"+
"\u0001\u0000\u0000\u0000\u01f8\u01d2\u0001\u0000\u0000\u0000\u01f8\u01d3"+ "\u0001\u0000\u0000\u0000\u01f8\u01d2\u0001\u0000\u0000\u0000\u01f8\u01d3"+
"\u0001\u0000\u0000\u0000\u01f8\u01d4\u0001\u0000\u0000\u0000\u01f8\u01d5"+ "\u0001\u0000\u0000\u0000\u01f8\u01d4\u0001\u0000\u0000\u0000\u01f8\u01d5"+
"\u0001\u0000\u0000\u0000\u01f8\u01d6\u0001\u0000\u0000\u0000\u01f8\u01d7"+ "\u0001\u0000\u0000\u0000\u01f8\u01d6\u0001\u0000\u0000\u0000\u01f8\u01d7"+
"\u0001\u0000\u0000\u0000\u01f8\u01e2\u0001\u0000\u0000\u0000\u01f8\u01ed"+ "\u0001\u0000\u0000\u0000\u01f8\u01e2\u0001\u0000\u0000\u0000\u01f8\u01ed"+
"\u0001\u0000\u0000\u0000\u01f9E\u0001\u0000\u0000\u0000\u01fa\u01fd\u0005"+ "\u0001\u0000\u0000\u0000\u01f9E\u0001\u0000\u0000\u0000\u01fa\u01fd\u0005"+
"7\u0000\u0000\u01fb\u01fd\u0005I\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000"+ "6\u0000\u0000\u01fb\u01fd\u0005G\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000"+
"\u0000\u01fc\u01fb\u0001\u0000\u0000\u0000\u01fdG\u0001\u0000\u0000\u0000"+ "\u0000\u01fc\u01fb\u0001\u0000\u0000\u0000\u01fdG\u0001\u0000\u0000\u0000"+
"\u01fe\u0201\u0003@ \u0000\u01ff\u0201\u0003F#\u0000\u0200\u01fe\u0001"+ "\u01fe\u0201\u0003@ \u0000\u01ff\u0201\u0003F#\u0000\u0200\u01fe\u0001"+
"\u0000\u0000\u0000\u0200\u01ff\u0001\u0000\u0000\u0000\u0201I\u0001\u0000"+ "\u0000\u0000\u0000\u0200\u01ff\u0001\u0000\u0000\u0000\u0201I\u0001\u0000"+
@@ -6701,7 +6701,7 @@ public class EsqlBaseParser extends ParserConfig {
"\u0000\u0000\u0000\u020cM\u0001\u0000\u0000\u0000\u020d\u020b\u0001\u0000"+ "\u0000\u0000\u0000\u020cM\u0001\u0000\u0000\u0000\u020d\u020b\u0001\u0000"+
"\u0000\u0000\u020e\u0210\u0003\n\u0005\u0000\u020f\u0211\u0007\u0004\u0000"+ "\u0000\u0000\u020e\u0210\u0003\n\u0005\u0000\u020f\u0211\u0007\u0004\u0000"+
"\u0000\u0210\u020f\u0001\u0000\u0000\u0000\u0210\u0211\u0001\u0000\u0000"+ "\u0000\u0210\u020f\u0001\u0000\u0000\u0000\u0210\u0211\u0001\u0000\u0000"+
"\u0000\u0211\u0214\u0001\u0000\u0000\u0000\u0212\u0213\u00055\u0000\u0000"+ "\u0000\u0211\u0214\u0001\u0000\u0000\u0000\u0212\u0213\u00054\u0000\u0000"+
"\u0213\u0215\u0007\u0005\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000"+ "\u0213\u0215\u0007\u0005\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000"+
"\u0214\u0215\u0001\u0000\u0000\u0000\u0215O\u0001\u0000\u0000\u0000\u0216"+ "\u0214\u0215\u0001\u0000\u0000\u0000\u0215O\u0001\u0000\u0000\u0000\u0216"+
"\u0217\u0005\b\u0000\u0000\u0217\u0218\u0003>\u001f\u0000\u0218Q\u0001"+ "\u0217\u0005\b\u0000\u0000\u0217\u0218\u0003>\u001f\u0000\u0218Q\u0001"+
@@ -6736,8 +6736,8 @@ public class EsqlBaseParser extends ParserConfig {
"\u0000\u0251i\u0001\u0000\u0000\u0000\u0252\u0253\u0005 \u0000\u0000\u0253"+ "\u0000\u0251i\u0001\u0000\u0000\u0000\u0252\u0253\u0005 \u0000\u0000\u0253"+
"k\u0001\u0000\u0000\u0000\u0254\u0255\u0007\u0007\u0000\u0000\u0255m\u0001"+ "k\u0001\u0000\u0000\u0000\u0254\u0255\u0007\u0007\u0000\u0000\u0255m\u0001"+
"\u0000\u0000\u0000\u0256\u0257\u0005\u0005\u0000\u0000\u0257\u0258\u0003"+ "\u0000\u0000\u0000\u0256\u0257\u0005\u0005\u0000\u0000\u0257\u0258\u0003"+
"p8\u0000\u0258o\u0001\u0000\u0000\u0000\u0259\u025a\u0005J\u0000\u0000"+ "p8\u0000\u0258o\u0001\u0000\u0000\u0000\u0259\u025a\u0005H\u0000\u0000"+
"\u025a\u025b\u0003\u0002\u0001\u0000\u025b\u025c\u0005K\u0000\u0000\u025c"+ "\u025a\u025b\u0003\u0002\u0001\u0000\u025b\u025c\u0005I\u0000\u0000\u025c"+
"q\u0001\u0000\u0000\u0000\u025d\u025e\u0005\r\u0000\u0000\u025e\u025f"+ "q\u0001\u0000\u0000\u0000\u025d\u025e\u0005\r\u0000\u0000\u025e\u025f"+
"\u0005m\u0000\u0000\u025fs\u0001\u0000\u0000\u0000\u0260\u0261\u0005\u0003"+ "\u0005m\u0000\u0000\u025fs\u0001\u0000\u0000\u0000\u0260\u0261\u0005\u0003"+
"\u0000\u0000\u0261\u0264\u0005c\u0000\u0000\u0262\u0263\u0005a\u0000\u0000"+ "\u0000\u0000\u0261\u0264\u0005c\u0000\u0000\u0262\u0263\u0005a\u0000\u0000"+
@@ -6779,8 +6779,8 @@ public class EsqlBaseParser extends ParserConfig {
"\u02a7\u0089\u0001\u0000\u0000\u0000\u02a8\u02aa\u0003\u008cF\u0000\u02a9"+ "\u02a7\u0089\u0001\u0000\u0000\u0000\u02a8\u02aa\u0003\u008cF\u0000\u02a9"+
"\u02a8\u0001\u0000\u0000\u0000\u02aa\u02ab\u0001\u0000\u0000\u0000\u02ab"+ "\u02a8\u0001\u0000\u0000\u0000\u02aa\u02ab\u0001\u0000\u0000\u0000\u02ab"+
"\u02a9\u0001\u0000\u0000\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ac"+ "\u02a9\u0001\u0000\u0000\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ac"+
"\u008b\u0001\u0000\u0000\u0000\u02ad\u02ae\u00052\u0000\u0000\u02ae\u02af"+ "\u008b\u0001\u0000\u0000\u0000\u02ad\u02ae\u0005J\u0000\u0000\u02ae\u02af"+
"\u0003\u008eG\u0000\u02af\u02b0\u00059\u0000\u0000\u02b0\u008d\u0001\u0000"+ "\u0003\u008eG\u0000\u02af\u02b0\u0005K\u0000\u0000\u02b0\u008d\u0001\u0000"+
"\u0000\u0000\u02b1\u02b2\u0006G\uffff\uffff\u0000\u02b2\u02b3\u0003\u0090"+ "\u0000\u0000\u02b1\u02b2\u0006G\uffff\uffff\u0000\u02b2\u02b3\u0003\u0090"+
"H\u0000\u02b3\u02b9\u0001\u0000\u0000\u0000\u02b4\u02b5\n\u0001\u0000"+ "H\u0000\u02b3\u02b9\u0001\u0000\u0000\u0000\u02b4\u02b5\n\u0001\u0000"+
"\u0000\u02b5\u02b6\u0005\u001f\u0000\u0000\u02b6\u02b8\u0003\u0090H\u0000"+ "\u0000\u02b5\u02b6\u0005\u001f\u0000\u0000\u02b6\u02b8\u0003\u0090H\u0000"+

View file

@@ -3085,7 +3085,28 @@ public class StatementParserTests extends AbstractStatementParserTests {
"line 1:20: mismatched input 'FORK' expecting {'limit', 'sort', 'where'}" "line 1:20: mismatched input 'FORK' expecting {'limit', 'sort', 'where'}"
); );
expectError("FROM foo* | FORK ( x+1 ) ( WHERE y>2 )", "line 1:20: mismatched input 'x+1' expecting {'limit', 'sort', 'where'}"); expectError("FROM foo* | FORK ( x+1 ) ( WHERE y>2 )", "line 1:20: mismatched input 'x+1' expecting {'limit', 'sort', 'where'}");
expectError("FROM foo* | FORK ( LIMIT 10 ) ( y+2 )", "line 1:33: mismatched input 'y' expecting {'limit', 'sort', 'where'}"); expectError("FROM foo* | FORK ( LIMIT 10 ) ( y+2 )", "line 1:33: mismatched input 'y+2' expecting {'limit', 'sort', 'where'}");
}
public void testFieldNamesAsCommands() throws Exception {
String[] keywords = new String[] {
"dissect",
"drop",
"enrich",
"eval",
"explain",
"from",
"grok",
"keep",
"limit",
"mv_expand",
"rename",
"sort",
"stats" };
for (String keyword : keywords) {
var plan = statement("FROM test | STATS avg(" + keyword + ")");
var aggregate = as(plan, Aggregate.class);
}
} }
static Alias alias(String name, Expression value) { static Alias alias(String name, Expression value) {

View file

@@ -34,8 +34,8 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType;
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingFloat; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbedding;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
@@ -164,24 +164,24 @@ public class TestDenseInferenceServiceExtension implements InferenceServiceExte
} }
} }
private InferenceTextEmbeddingFloatResults makeResults(List<String> input, int dimensions) { private TextEmbeddingFloatResults makeResults(List<String> input, int dimensions) {
List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddings = new ArrayList<>(); List<TextEmbeddingFloatResults.Embedding> embeddings = new ArrayList<>();
for (String inputString : input) { for (String inputString : input) {
List<Float> floatEmbeddings = generateEmbedding(inputString, dimensions); List<Float> floatEmbeddings = generateEmbedding(inputString, dimensions);
embeddings.add(InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(floatEmbeddings)); embeddings.add(TextEmbeddingFloatResults.Embedding.of(floatEmbeddings));
} }
return new InferenceTextEmbeddingFloatResults(embeddings); return new TextEmbeddingFloatResults(embeddings);
} }
private List<ChunkedInference> makeChunkedResults(List<String> input, int dimensions) { private List<ChunkedInference> makeChunkedResults(List<String> input, int dimensions) {
InferenceTextEmbeddingFloatResults nonChunkedResults = makeResults(input, dimensions); TextEmbeddingFloatResults nonChunkedResults = makeResults(input, dimensions);
var results = new ArrayList<ChunkedInference>(); var results = new ArrayList<ChunkedInference>();
for (int i = 0; i < input.size(); i++) { for (int i = 0; i < input.size(); i++) {
results.add( results.add(
new ChunkedInferenceEmbeddingFloat( new ChunkedInferenceEmbedding(
List.of( List.of(
new ChunkedInferenceEmbeddingFloat.FloatEmbeddingChunk( new TextEmbeddingFloatResults.Chunk(
nonChunkedResults.embeddings().get(i).values(), nonChunkedResults.embeddings().get(i).values(),
input.get(i), input.get(i),
new ChunkedInference.TextOffset(0, input.get(i).length()) new ChunkedInference.TextOffset(0, input.get(i).length())

View file

@@ -32,7 +32,7 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType;
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbedding;
import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults;
import org.elasticsearch.xpack.core.ml.search.WeightedToken; import org.elasticsearch.xpack.core.ml.search.WeightedToken;
@@ -171,9 +171,9 @@ public class TestSparseInferenceServiceExtension implements InferenceServiceExte
tokens.add(new WeightedToken("feature_" + j, generateEmbedding(input.get(i), j))); tokens.add(new WeightedToken("feature_" + j, generateEmbedding(input.get(i), j)));
} }
results.add( results.add(
new ChunkedInferenceEmbeddingSparse( new ChunkedInferenceEmbedding(
List.of( List.of(
new ChunkedInferenceEmbeddingSparse.SparseEmbeddingChunk( new SparseEmbeddingResults.Chunk(
tokens, tokens,
input.get(i), input.get(i),
new ChunkedInference.TextOffset(0, input.get(i).length()) new ChunkedInference.TextOffset(0, input.get(i).length())

View file

@@ -18,13 +18,13 @@ import org.elasticsearch.inference.ServiceSettings;
import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskSettings;
import org.elasticsearch.inference.UnifiedCompletionRequest; import org.elasticsearch.inference.UnifiedCompletionRequest;
import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults;
import org.elasticsearch.xpack.core.inference.results.LegacyTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.LegacyTextEmbeddingResults;
import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults;
import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults;
import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionResults; import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionResults;
import org.elasticsearch.xpack.core.inference.results.StreamingUnifiedChatCompletionResults; import org.elasticsearch.xpack.core.inference.results.StreamingUnifiedChatCompletionResults;
import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults;
import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager;
import org.elasticsearch.xpack.inference.chunking.SentenceBoundaryChunkingSettings; import org.elasticsearch.xpack.inference.chunking.SentenceBoundaryChunkingSettings;
import org.elasticsearch.xpack.inference.chunking.WordBoundaryChunkingSettings; import org.elasticsearch.xpack.inference.chunking.WordBoundaryChunkingSettings;
@@ -59,6 +59,7 @@ import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbedd
import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankServiceSettings; import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankServiceSettings;
import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankTaskSettings; import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankTaskSettings;
import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSparseEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSparseEmbeddingsServiceSettings;
import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionServiceSettings;
import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandInternalServiceSettings;
import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandInternalTextEmbeddingServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandInternalTextEmbeddingServiceSettings;
import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticRerankerServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticRerankerServiceSettings;
@@ -473,18 +474,10 @@ public class InferenceNamedWriteablesProvider {
new NamedWriteableRegistry.Entry(InferenceServiceResults.class, SparseEmbeddingResults.NAME, SparseEmbeddingResults::new) new NamedWriteableRegistry.Entry(InferenceServiceResults.class, SparseEmbeddingResults.NAME, SparseEmbeddingResults::new)
); );
namedWriteables.add( namedWriteables.add(
new NamedWriteableRegistry.Entry( new NamedWriteableRegistry.Entry(InferenceServiceResults.class, TextEmbeddingFloatResults.NAME, TextEmbeddingFloatResults::new)
InferenceServiceResults.class,
InferenceTextEmbeddingFloatResults.NAME,
InferenceTextEmbeddingFloatResults::new
)
); );
namedWriteables.add( namedWriteables.add(
new NamedWriteableRegistry.Entry( new NamedWriteableRegistry.Entry(InferenceServiceResults.class, TextEmbeddingByteResults.NAME, TextEmbeddingByteResults::new)
InferenceServiceResults.class,
InferenceTextEmbeddingByteResults.NAME,
InferenceTextEmbeddingByteResults::new
)
); );
namedWriteables.add( namedWriteables.add(
new NamedWriteableRegistry.Entry(InferenceServiceResults.class, ChatCompletionResults.NAME, ChatCompletionResults::new) new NamedWriteableRegistry.Entry(InferenceServiceResults.class, ChatCompletionResults.NAME, ChatCompletionResults::new)
@@ -662,5 +655,12 @@ public class InferenceNamedWriteablesProvider {
ElasticInferenceServiceSparseEmbeddingsServiceSettings::new ElasticInferenceServiceSparseEmbeddingsServiceSettings::new
) )
); );
namedWriteables.add(
new NamedWriteableRegistry.Entry(
ServiceSettings.class,
ElasticInferenceServiceCompletionServiceSettings.NAME,
ElasticInferenceServiceCompletionServiceSettings::new
)
);
} }
} }

View file

@@ -10,24 +10,19 @@ package org.elasticsearch.xpack.inference.chunking;
import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper;
import org.elasticsearch.inference.ChunkedInference; import org.elasticsearch.inference.ChunkedInference;
import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.ChunkingSettings;
import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InferenceServiceResults;
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingByte; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbedding;
import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingFloat;
import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse;
import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError;
import org.elasticsearch.xpack.core.inference.results.InferenceByteEmbedding; import org.elasticsearch.xpack.core.inference.results.EmbeddingResults;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.elasticsearch.xpack.inference.chunking.Chunker.ChunkOffset;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults;
import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function; import java.util.concurrent.atomic.AtomicReferenceArray;
import java.util.stream.Collectors; import java.util.stream.Collectors;
/** /**
@@ -43,145 +38,72 @@ import java.util.stream.Collectors;
*/ */
public class EmbeddingRequestChunker { public class EmbeddingRequestChunker {
public enum EmbeddingType { // Visible for testing
FLOAT, record Request(int inputIndex, int chunkIndex, ChunkOffset chunk, List<String> inputs) {
BYTE, public String chunkText() {
SPARSE; return inputs.get(inputIndex).substring(chunk.start(), chunk.end());
public static EmbeddingType fromDenseVectorElementType(DenseVectorFieldMapper.ElementType elementType) {
return switch (elementType) {
case BYTE -> EmbeddingType.BYTE;
case FLOAT -> EmbeddingType.FLOAT;
case BIT -> throw new IllegalArgumentException("Bit vectors are not supported");
};
} }
}; }
public static final int DEFAULT_WORDS_PER_CHUNK = 250; public record BatchRequest(List<Request> requests) {
public static final int DEFAULT_CHUNK_OVERLAP = 100; public List<String> inputs() {
return requests.stream().map(Request::chunkText).collect(Collectors.toList());
}
}
private final List<BatchRequest> batchedRequests = new ArrayList<>(); public record BatchRequestAndListener(BatchRequest batch, ActionListener<InferenceServiceResults> listener) {}
private static final int DEFAULT_WORDS_PER_CHUNK = 250;
private static final int DEFAULT_CHUNK_OVERLAP = 100;
private final List<String> inputs;
private final List<List<Request>> requests;
private final List<BatchRequest> batchRequests;
private final AtomicInteger resultCount = new AtomicInteger(); private final AtomicInteger resultCount = new AtomicInteger();
private final int maxNumberOfInputsPerBatch;
private final int wordsPerChunk;
private final int chunkOverlap;
private final EmbeddingType embeddingType;
private final ChunkingSettings chunkingSettings;
private List<ChunkOffsetsAndInput> chunkedOffsets; private final List<AtomicReferenceArray<EmbeddingResults.Embedding<?>>> results;
private List<AtomicArray<List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding>>> floatResults; private final AtomicArray<Exception> errors;
private List<AtomicArray<List<InferenceByteEmbedding>>> byteResults;
private List<AtomicArray<List<SparseEmbeddingResults.Embedding>>> sparseResults;
private AtomicArray<Exception> errors;
private ActionListener<List<ChunkedInference>> finalListener; private ActionListener<List<ChunkedInference>> finalListener;
public EmbeddingRequestChunker(List<String> inputs, int maxNumberOfInputsPerBatch, EmbeddingType embeddingType) { public EmbeddingRequestChunker(List<String> inputs, int maxNumberOfInputsPerBatch) {
this(inputs, maxNumberOfInputsPerBatch, DEFAULT_WORDS_PER_CHUNK, DEFAULT_CHUNK_OVERLAP, embeddingType); this(inputs, maxNumberOfInputsPerBatch, null);
} }
public EmbeddingRequestChunker( public EmbeddingRequestChunker(List<String> inputs, int maxNumberOfInputsPerBatch, int wordsPerChunk, int chunkOverlap) {
List<String> inputs, this(inputs, maxNumberOfInputsPerBatch, new WordBoundaryChunkingSettings(wordsPerChunk, chunkOverlap));
int maxNumberOfInputsPerBatch,
int wordsPerChunk,
int chunkOverlap,
EmbeddingType embeddingType
) {
this.maxNumberOfInputsPerBatch = maxNumberOfInputsPerBatch;
this.wordsPerChunk = wordsPerChunk;
this.chunkOverlap = chunkOverlap;
this.embeddingType = embeddingType;
this.chunkingSettings = null;
splitIntoBatchedRequests(inputs);
} }
public EmbeddingRequestChunker( public EmbeddingRequestChunker(List<String> inputs, int maxNumberOfInputsPerBatch, ChunkingSettings chunkingSettings) {
List<String> inputs, this.inputs = inputs;
int maxNumberOfInputsPerBatch,
EmbeddingType embeddingType,
ChunkingSettings chunkingSettings
) {
    this.maxNumberOfInputsPerBatch = maxNumberOfInputsPerBatch;
    this.wordsPerChunk = DEFAULT_WORDS_PER_CHUNK; // Can be removed after ChunkingConfigurationFeatureFlag is enabled
    this.chunkOverlap = DEFAULT_CHUNK_OVERLAP; // Can be removed after ChunkingConfigurationFeatureFlag is enabled
    this.embeddingType = embeddingType;
    this.chunkingSettings = chunkingSettings;
    splitIntoBatchedRequests(inputs);
}

private void splitIntoBatchedRequests(List<String> inputs) {
    Function<String, List<Chunker.ChunkOffset>> chunkFunction;
    if (chunkingSettings != null) {
        var chunker = ChunkerBuilder.fromChunkingStrategy(chunkingSettings.getChunkingStrategy());
        chunkFunction = input -> chunker.chunk(input, chunkingSettings);
    } else {
        var chunker = new WordBoundaryChunker();
        chunkFunction = input -> chunker.chunk(input, wordsPerChunk, chunkOverlap);
    }

    chunkedOffsets = new ArrayList<>(inputs.size());
    switch (embeddingType) {
        case FLOAT -> floatResults = new ArrayList<>(inputs.size());
        case BYTE -> byteResults = new ArrayList<>(inputs.size());
        case SPARSE -> sparseResults = new ArrayList<>(inputs.size());
    }
    errors = new AtomicArray<>(inputs.size());

    for (int i = 0; i < inputs.size(); i++) {
        var chunks = chunkFunction.apply(inputs.get(i));
        var offSetsAndInput = new ChunkOffsetsAndInput(chunks, inputs.get(i));
        int numberOfSubBatches = addToBatches(offSetsAndInput, i);
        // size the results array with the expected number of request/responses
        switch (embeddingType) {
            case FLOAT -> floatResults.add(new AtomicArray<>(numberOfSubBatches));
            case BYTE -> byteResults.add(new AtomicArray<>(numberOfSubBatches));
            case SPARSE -> sparseResults.add(new AtomicArray<>(numberOfSubBatches));
        }
        chunkedOffsets.add(offSetsAndInput);
    }
}

private int addToBatches(ChunkOffsetsAndInput chunk, int inputIndex) {
    BatchRequest lastBatch;
    if (batchedRequests.isEmpty()) {
        lastBatch = new BatchRequest(new ArrayList<>());
        batchedRequests.add(lastBatch);
    } else {
        lastBatch = batchedRequests.get(batchedRequests.size() - 1);
    }

    int freeSpace = maxNumberOfInputsPerBatch - lastBatch.size();
    assert freeSpace >= 0;

    // chunks may span multiple batches,
    // the chunkIndex keeps them ordered.
    int chunkIndex = 0;

    if (freeSpace > 0) {
        // use any free space in the previous batch before creating new batches
        int toAdd = Math.min(freeSpace, chunk.offsets().size());
        lastBatch.addSubBatch(
            new SubBatch(
                new ChunkOffsetsAndInput(chunk.offsets().subList(0, toAdd), chunk.input()),
                new SubBatchPositionsAndCount(inputIndex, chunkIndex++, toAdd)
            )
        );
    }

    int start = freeSpace;
    while (start < chunk.offsets().size()) {
        int toAdd = Math.min(maxNumberOfInputsPerBatch, chunk.offsets().size() - start);
        var batch = new BatchRequest(new ArrayList<>());
        batch.addSubBatch(
            new SubBatch(
                new ChunkOffsetsAndInput(chunk.offsets().subList(start, start + toAdd), chunk.input()),
                new SubBatchPositionsAndCount(inputIndex, chunkIndex++, toAdd)
            )
        );
        batchedRequests.add(batch);
        start += toAdd;
    }
    return chunkIndex;
}

    this.results = new ArrayList<>(inputs.size());
    this.errors = new AtomicArray<>(inputs.size());

    if (chunkingSettings == null) {
        chunkingSettings = new WordBoundaryChunkingSettings(DEFAULT_WORDS_PER_CHUNK, DEFAULT_CHUNK_OVERLAP);
    }
    Chunker chunker = ChunkerBuilder.fromChunkingStrategy(chunkingSettings.getChunkingStrategy());

    this.requests = new ArrayList<>(inputs.size());

    for (int inputIndex = 0; inputIndex < inputs.size(); inputIndex++) {
        List<ChunkOffset> chunks = chunker.chunk(inputs.get(inputIndex), chunkingSettings);
        List<Request> requestForInput = new ArrayList<>(chunks.size());
        for (int chunkIndex = 0; chunkIndex < chunks.size(); chunkIndex++) {
            requestForInput.add(new Request(inputIndex, chunkIndex, chunks.get(chunkIndex), inputs));
        }
        requests.add(requestForInput);
        // size the results array with the expected number of request/responses
        results.add(new AtomicReferenceArray<>(chunks.size()));
    }

    AtomicInteger counter = new AtomicInteger();
    this.batchRequests = requests.stream()
        .flatMap(List::stream)
        .collect(Collectors.groupingBy(it -> counter.getAndIncrement() / maxNumberOfInputsPerBatch))
        .values()
        .stream()
        .map(BatchRequest::new)
        .toList();
}
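The added code turns batching into a two-step stream pipeline: flatten every per-input chunk request into one list, then group consecutive requests into batches of at most maxNumberOfInputsPerBatch. The following is a minimal, self-contained sketch of that grouping idiom, not the production class: plain strings stand in for the chunker's Request objects, the batch size of 3 is an assumed example value, and an explicit TreeMap is used so the batch ordering is guaranteed rather than incidental.

import java.util.List;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

public class BatchGroupingSketch {
    public static void main(String[] args) {
        int maxNumberOfInputsPerBatch = 3; // assumed example value
        List<String> flattenedChunkRequests = List.of("in0/c0", "in0/c1", "in1/c0", "in1/c1", "in1/c2", "in2/c0", "in2/c1");

        // Assign each request a running position and group by (position / batch size).
        AtomicInteger counter = new AtomicInteger();
        List<List<String>> batches = flattenedChunkRequests.stream()
            .collect(Collectors.groupingBy(r -> counter.getAndIncrement() / maxNumberOfInputsPerBatch, TreeMap::new, Collectors.toList()))
            .values()
            .stream()
            .toList();

        // Prints: [[in0/c0, in0/c1, in1/c0], [in1/c1, in1/c2, in2/c0], [in2/c1]]
        System.out.println(batches);
    }
}

Because the flattened list interleaves chunks from different inputs, a single batch can carry chunks of several inputs, and chunks of one input can span several batches; the per-request (inputIndex, chunkIndex) bookkeeping below is what lets responses be routed back.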
/** /**
@ -191,23 +113,7 @@ public class EmbeddingRequestChunker {
*/ */
public List<BatchRequestAndListener> batchRequestsWithListeners(ActionListener<List<ChunkedInference>> finalListener) { public List<BatchRequestAndListener> batchRequestsWithListeners(ActionListener<List<ChunkedInference>> finalListener) {
this.finalListener = finalListener; this.finalListener = finalListener;
        int numberOfRequests = batchedRequests.size();

        var requests = new ArrayList<BatchRequestAndListener>(numberOfRequests);
        for (var batch : batchedRequests) {
            requests.add(
                new BatchRequestAndListener(
                    batch,
                    new DebatchingListener(
                        batch.subBatches().stream().map(SubBatch::positions).collect(Collectors.toList()),
                        numberOfRequests
                    )
                )
            );
        }
        return requests;

        return batchRequests.stream().map(req -> new BatchRequestAndListener(req, new DebatchingListener(req))).toList();
} }
/** /**
@ -220,266 +126,83 @@ public class EmbeddingRequestChunker {
*/ */
private class DebatchingListener implements ActionListener<InferenceServiceResults> { private class DebatchingListener implements ActionListener<InferenceServiceResults> {
    private final List<SubBatchPositionsAndCount> positions;
    private final int totalNumberOfRequests;

    DebatchingListener(List<SubBatchPositionsAndCount> positions, int totalNumberOfRequests) {
        this.positions = positions;
        this.totalNumberOfRequests = totalNumberOfRequests;
    }

    @Override
    public void onResponse(InferenceServiceResults inferenceServiceResults) {
        switch (embeddingType) {
            case FLOAT -> handleFloatResults(inferenceServiceResults);
            case BYTE -> handleByteResults(inferenceServiceResults);
            case SPARSE -> handleSparseResults(inferenceServiceResults);
        }
    }

    private void handleFloatResults(InferenceServiceResults inferenceServiceResults) {
        if (inferenceServiceResults instanceof InferenceTextEmbeddingFloatResults floatEmbeddings) {
            if (failIfNumRequestsDoNotMatch(floatEmbeddings.embeddings().size())) {
                return;
            }

            int start = 0;
            for (var pos : positions) {
                floatResults.get(pos.inputIndex())
                    .setOnce(pos.chunkIndex(), floatEmbeddings.embeddings().subList(start, start + pos.embeddingCount()));
                start += pos.embeddingCount();
            }

            if (resultCount.incrementAndGet() == totalNumberOfRequests) {
                sendResponse();
            }
        } else {
            onFailure(
                unexpectedResultTypeException(inferenceServiceResults.getWriteableName(), InferenceTextEmbeddingFloatResults.NAME)
            );
        }
    }

    private void handleByteResults(InferenceServiceResults inferenceServiceResults) {
        if (inferenceServiceResults instanceof InferenceTextEmbeddingByteResults byteEmbeddings) {
            if (failIfNumRequestsDoNotMatch(byteEmbeddings.embeddings().size())) {
                return;
            }

            int start = 0;
            for (var pos : positions) {
                byteResults.get(pos.inputIndex())
                    .setOnce(pos.chunkIndex(), byteEmbeddings.embeddings().subList(start, start + pos.embeddingCount()));
                start += pos.embeddingCount();
            }

            if (resultCount.incrementAndGet() == totalNumberOfRequests) {
                sendResponse();
            }
        } else {
            onFailure(
                unexpectedResultTypeException(inferenceServiceResults.getWriteableName(), InferenceTextEmbeddingByteResults.NAME)
            );
        }
    }

    private void handleSparseResults(InferenceServiceResults inferenceServiceResults) {
        if (inferenceServiceResults instanceof SparseEmbeddingResults sparseEmbeddings) {
            if (failIfNumRequestsDoNotMatch(sparseEmbeddings.embeddings().size())) {
                return;
            }

            int start = 0;
            for (var pos : positions) {
                sparseResults.get(pos.inputIndex())
                    .setOnce(pos.chunkIndex(), sparseEmbeddings.embeddings().subList(start, start + pos.embeddingCount()));
                start += pos.embeddingCount();
            }

            if (resultCount.incrementAndGet() == totalNumberOfRequests) {
                sendResponse();
            }
        } else {
            onFailure(
                unexpectedResultTypeException(inferenceServiceResults.getWriteableName(), InferenceTextEmbeddingByteResults.NAME)
            );
        }
    }

    private boolean failIfNumRequestsDoNotMatch(int numberOfResults) {
        int numberOfRequests = positions.stream().mapToInt(SubBatchPositionsAndCount::embeddingCount).sum();
        if (numberOfRequests != numberOfResults) {
            onFailure(
                new ElasticsearchStatusException(
                    "Error the number of embedding responses [{}] does not equal the number of " + "requests [{}]",
                    RestStatus.INTERNAL_SERVER_ERROR,
                    numberOfResults,
                    numberOfRequests
                )
            );
            return true;
        }
        return false;
    }

    private ElasticsearchStatusException unexpectedResultTypeException(String got, String expected) {
        return new ElasticsearchStatusException(
            "Unexpected inference result type [" + got + "], expected a [" + expected + "]",
            RestStatus.INTERNAL_SERVER_ERROR
        );
    }

    @Override
    public void onFailure(Exception e) {
        for (var pos : positions) {
            errors.set(pos.inputIndex(), e);
        }

        if (resultCount.incrementAndGet() == totalNumberOfRequests) {
            sendResponse();
        }
    }
}

    private final BatchRequest request;

    DebatchingListener(BatchRequest request) {
        this.request = request;
    }

    @Override
    public void onResponse(InferenceServiceResults inferenceServiceResults) {
        if (inferenceServiceResults instanceof EmbeddingResults<?, ?> embeddingResults) {
            if (embeddingResults.embeddings().size() != request.requests.size()) {
                onFailure(numResultsDoesntMatchException(embeddingResults.embeddings().size(), request.requests.size()));
                return;
            }
            for (int i = 0; i < embeddingResults.embeddings().size(); i++) {
                results.get(request.requests().get(i).inputIndex())
                    .set(request.requests().get(i).chunkIndex(), embeddingResults.embeddings().get(i));
            }
            if (resultCount.incrementAndGet() == batchRequests.size()) {
                sendFinalResponse();
            }
        } else {
            onFailure(unexpectedResultTypeException(inferenceServiceResults.getWriteableName()));
        }
    }

    private ElasticsearchStatusException numResultsDoesntMatchException(int numResults, int numRequests) {
        return new ElasticsearchStatusException(
            "Error the number of embedding responses [{}] does not equal the number of requests [{}]",
            RestStatus.INTERNAL_SERVER_ERROR,
            numResults,
            numRequests
        );
    }

    private ElasticsearchStatusException unexpectedResultTypeException(String resultType) {
        return new ElasticsearchStatusException(
            "Unexpected inference result type [{}], expected [EmbeddingResults]",
            RestStatus.INTERNAL_SERVER_ERROR,
            resultType
        );
    }

    @Override
    public void onFailure(Exception e) {
        for (Request request : request.requests) {
            errors.set(request.inputIndex(), e);
        }
        if (resultCount.incrementAndGet() == batchRequests.size()) {
            sendFinalResponse();
        }
    }
}
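For orientation, here is a small, self-contained sketch (not the production class; strings stand in for embeddings) of the de-batching bookkeeping the new listener performs: each embedding in a batch response is written into the result slot addressed by its request's (inputIndex, chunkIndex), and a shared counter detects when every batch has reported so the final response can be assembled.

import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReferenceArray;

public class DebatchingSketch {
    // One slot per chunk of each input; a String stands in for an embedding.
    record Slot(int inputIndex, int chunkIndex) {}

    public static void main(String[] args) {
        // Two inputs: the first split into two chunks, the second into one.
        List<AtomicReferenceArray<String>> results = List.of(new AtomicReferenceArray<>(2), new AtomicReferenceArray<>(1));

        // Two batches of slots, e.g. produced by the grouping step shown earlier.
        List<List<Slot>> batches = List.of(List.of(new Slot(0, 0), new Slot(0, 1)), List.of(new Slot(1, 0)));
        AtomicInteger completedBatches = new AtomicInteger();

        for (List<Slot> batch : batches) {
            // Simulated per-batch response: one embedding per request, in request order.
            for (Slot slot : batch) {
                results.get(slot.inputIndex()).set(slot.chunkIndex(), "embedding-" + slot.inputIndex() + "-" + slot.chunkIndex());
            }
            if (completedBatches.incrementAndGet() == batches.size()) {
                System.out.println("all batches done, results per input: " + results);
            }
        }
    }
}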
private void sendResponse() {
    var response = new ArrayList<ChunkedInference>(chunkedOffsets.size());
    for (int i = 0; i < chunkedOffsets.size(); i++) {
        if (errors.get(i) != null) {
            response.add(new ChunkedInferenceError(errors.get(i)));
        } else {
            response.add(mergeResultsWithInputs(i));
        }
    }

    finalListener.onResponse(response);
}

private ChunkedInference mergeResultsWithInputs(int resultIndex) {
    return switch (embeddingType) {
        case FLOAT -> mergeFloatResultsWithInputs(chunkedOffsets.get(resultIndex), floatResults.get(resultIndex));
        case BYTE -> mergeByteResultsWithInputs(chunkedOffsets.get(resultIndex), byteResults.get(resultIndex));
        case SPARSE -> mergeSparseResultsWithInputs(chunkedOffsets.get(resultIndex), sparseResults.get(resultIndex));
    };
}

private ChunkedInferenceEmbeddingFloat mergeFloatResultsWithInputs(
    ChunkOffsetsAndInput chunks,
    AtomicArray<List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding>> debatchedResults
) {
    var all = new ArrayList<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding>();
    for (int i = 0; i < debatchedResults.length(); i++) {
        var subBatch = debatchedResults.get(i);
        all.addAll(subBatch);
    }

    assert chunks.size() == all.size();

    var embeddingChunks = new ArrayList<ChunkedInferenceEmbeddingFloat.FloatEmbeddingChunk>();
    for (int i = 0; i < chunks.size(); i++) {
        embeddingChunks.add(
            new ChunkedInferenceEmbeddingFloat.FloatEmbeddingChunk(
                all.get(i).values(),
                chunks.chunkText(i),
                new ChunkedInference.TextOffset(chunks.offsets().get(i).start(), chunks.offsets().get(i).end())
            )
        );
    }

    return new ChunkedInferenceEmbeddingFloat(embeddingChunks);
}

private ChunkedInferenceEmbeddingByte mergeByteResultsWithInputs(
    ChunkOffsetsAndInput chunks,
    AtomicArray<List<InferenceByteEmbedding>> debatchedResults
) {
    var all = new ArrayList<InferenceByteEmbedding>();
    for (int i = 0; i < debatchedResults.length(); i++) {
        var subBatch = debatchedResults.get(i);
        all.addAll(subBatch);
    }

    assert chunks.size() == all.size();

    var embeddingChunks = new ArrayList<ChunkedInferenceEmbeddingByte.ByteEmbeddingChunk>();
    for (int i = 0; i < chunks.size(); i++) {
        embeddingChunks.add(
            new ChunkedInferenceEmbeddingByte.ByteEmbeddingChunk(
                all.get(i).values(),
                chunks.chunkText(i),
                new ChunkedInference.TextOffset(chunks.offsets().get(i).start(), chunks.offsets().get(i).end())
            )
        );
    }

    return new ChunkedInferenceEmbeddingByte(embeddingChunks);
}

private ChunkedInferenceEmbeddingSparse mergeSparseResultsWithInputs(
    ChunkOffsetsAndInput chunks,
    AtomicArray<List<SparseEmbeddingResults.Embedding>> debatchedResults
) {
    var all = new ArrayList<SparseEmbeddingResults.Embedding>();
    for (int i = 0; i < debatchedResults.length(); i++) {
        var subBatch = debatchedResults.get(i);
        all.addAll(subBatch);
    }

    assert chunks.size() == all.size();

    var embeddingChunks = new ArrayList<ChunkedInferenceEmbeddingSparse.SparseEmbeddingChunk>();
    for (int i = 0; i < chunks.size(); i++) {
        embeddingChunks.add(
            new ChunkedInferenceEmbeddingSparse.SparseEmbeddingChunk(
                all.get(i).tokens(),
                chunks.chunkText(i),
                new ChunkedInference.TextOffset(chunks.offsets().get(i).start(), chunks.offsets().get(i).end())
            )
        );
    }

    return new ChunkedInferenceEmbeddingSparse(embeddingChunks);
}

public record BatchRequest(List<SubBatch> subBatches) {
    public int size() {
        return subBatches.stream().mapToInt(SubBatch::size).sum();
    }

    public void addSubBatch(SubBatch sb) {
        subBatches.add(sb);
    }

    public List<String> inputs() {
        return subBatches.stream().flatMap(s -> s.requests().toChunkText().stream()).collect(Collectors.toList());
    }
}

public record BatchRequestAndListener(BatchRequest batch, ActionListener<InferenceServiceResults> listener) {}

/**
 * Used for mapping batched requests back to the original input
 */
record SubBatchPositionsAndCount(int inputIndex, int chunkIndex, int embeddingCount) {}

record SubBatch(ChunkOffsetsAndInput requests, SubBatchPositionsAndCount positions) {
    int size() {
        return requests.offsets().size();
    }
}

record ChunkOffsetsAndInput(List<Chunker.ChunkOffset> offsets, String input) {
    List<String> toChunkText() {
        return offsets.stream().map(o -> input.substring(o.start(), o.end())).collect(Collectors.toList());
    }

    int size() {
        return offsets.size();
    }

    String chunkText(int index) {
        return input.substring(offsets.get(index).start(), offsets.get(index).end());
    }
}
}

private void sendFinalResponse() {
    var response = new ArrayList<ChunkedInference>(inputs.size());
    for (int i = 0; i < inputs.size(); i++) {
        if (errors.get(i) != null) {
            response.add(new ChunkedInferenceError(errors.get(i)));
        } else {
            response.add(mergeResultsWithInputs(i));
        }
    }
    finalListener.onResponse(response);
}

private ChunkedInference mergeResultsWithInputs(int index) {
    List<EmbeddingResults.Chunk> chunks = new ArrayList<>();
    List<Request> request = requests.get(index);
    AtomicReferenceArray<EmbeddingResults.Embedding<?>> result = results.get(index);
    for (int i = 0; i < request.size(); i++) {
        EmbeddingResults.Chunk chunk = result.get(i)
            .toChunk(
                request.get(i).chunkText(),
                new ChunkedInference.TextOffset(request.get(i).chunk.start(), request.get(i).chunk.end())
            );
        chunks.add(chunk);
    }
    return new ChunkedInferenceEmbedding(chunks);
}
}
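The merge step re-attaches each embedding to the piece of input text it was computed from. Both the removed ChunkOffsetsAndInput.chunkText helper and, judging by the new Request.chunkText call, the replacement resolve a chunk to text the same way: substring the original input at the chunk's (start, end) offsets. Below is a tiny standalone sketch of that mapping with a made-up stand-in for Chunker.ChunkOffset and illustrative offsets; it is not the production type.

import java.util.List;

public class ChunkTextSketch {
    // Stand-in for Chunker.ChunkOffset: a [start, end) character range into the input.
    record ChunkOffset(int start, int end) {}

    public static void main(String[] args) {
        String input = "the quick brown fox jumps over the lazy dog";
        List<ChunkOffset> offsets = List.of(new ChunkOffset(0, 15), new ChunkOffset(16, 30), new ChunkOffset(31, 43));

        // Each chunk's text is just a substring of the original input at its offsets.
        for (ChunkOffset o : offsets) {
            System.out.println("[" + o.start() + "," + o.end() + ") -> \"" + input.substring(o.start(), o.end()) + "\"");
        }
    }
}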

View file

@ -9,7 +9,7 @@ package org.elasticsearch.xpack.inference.external.response.alibabacloudsearch;
import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.HttpResult;
import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.request.Request;
@ -70,21 +70,20 @@ public class AlibabaCloudSearchEmbeddingsResponseEntity extends AlibabaCloudSear
* </code> * </code>
* </pre> * </pre>
*/ */
public static InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { public static TextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException {
return fromResponse(request, response, parser -> { return fromResponse(request, response, parser -> {
positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE);
List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddingList = XContentParserUtils.parseList( List<TextEmbeddingFloatResults.Embedding> embeddingList = XContentParserUtils.parseList(
parser, parser,
AlibabaCloudSearchEmbeddingsResponseEntity::parseEmbeddingObject AlibabaCloudSearchEmbeddingsResponseEntity::parseEmbeddingObject
); );
return new InferenceTextEmbeddingFloatResults(embeddingList); return new TextEmbeddingFloatResults(embeddingList);
}); });
} }
private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingObject(XContentParser parser) private static TextEmbeddingFloatResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException {
throws IOException {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE);
@ -96,7 +95,7 @@ public class AlibabaCloudSearchEmbeddingsResponseEntity extends AlibabaCloudSear
// if there are additional fields within this object, lets skip them, so we can begin parsing the next embedding array // if there are additional fields within this object, lets skip them, so we can begin parsing the next embedding array
parser.skipChildren(); parser.skipChildren();
return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValues); return TextEmbeddingFloatResults.Embedding.of(embeddingValues);
} }
private static float parseEmbeddingList(XContentParser parser) throws IOException { private static float parseEmbeddingList(XContentParser parser) throws IOException {

View file

@ -16,7 +16,7 @@ import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest; import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest;
import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockEmbeddingsRequest; import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockEmbeddingsRequest;
import org.elasticsearch.xpack.inference.external.response.XContentUtils; import org.elasticsearch.xpack.inference.external.response.XContentUtils;
@ -48,7 +48,7 @@ public class AmazonBedrockEmbeddingsResponse extends AmazonBedrockResponse {
throw new ElasticsearchException("unexpected request type [" + request.getClass() + "]"); throw new ElasticsearchException("unexpected request type [" + request.getClass() + "]");
} }
public static InferenceTextEmbeddingFloatResults fromResponse(InvokeModelResponse response, AmazonBedrockProvider provider) { public static TextEmbeddingFloatResults fromResponse(InvokeModelResponse response, AmazonBedrockProvider provider) {
var charset = StandardCharsets.UTF_8; var charset = StandardCharsets.UTF_8;
var bodyText = String.valueOf(charset.decode(response.body().asByteBuffer())); var bodyText = String.valueOf(charset.decode(response.body().asByteBuffer()));
@ -63,16 +63,14 @@ public class AmazonBedrockEmbeddingsResponse extends AmazonBedrockResponse {
var embeddingList = parseEmbeddings(jsonParser, provider); var embeddingList = parseEmbeddings(jsonParser, provider);
return new InferenceTextEmbeddingFloatResults(embeddingList); return new TextEmbeddingFloatResults(embeddingList);
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchException(e); throw new ElasticsearchException(e);
} }
} }
private static List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> parseEmbeddings( private static List<TextEmbeddingFloatResults.Embedding> parseEmbeddings(XContentParser jsonParser, AmazonBedrockProvider provider)
XContentParser jsonParser, throws IOException {
AmazonBedrockProvider provider
) throws IOException {
switch (provider) { switch (provider) {
case AMAZONTITAN -> { case AMAZONTITAN -> {
return parseTitanEmbeddings(jsonParser); return parseTitanEmbeddings(jsonParser);
@ -84,8 +82,7 @@ public class AmazonBedrockEmbeddingsResponse extends AmazonBedrockResponse {
} }
} }
private static List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> parseTitanEmbeddings(XContentParser parser) private static List<TextEmbeddingFloatResults.Embedding> parseTitanEmbeddings(XContentParser parser) throws IOException {
throws IOException {
/* /*
Titan response: Titan response:
{ {
@ -95,12 +92,11 @@ public class AmazonBedrockEmbeddingsResponse extends AmazonBedrockResponse {
*/ */
positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE);
List<Float> embeddingValuesList = parseList(parser, XContentUtils::parseFloat); List<Float> embeddingValuesList = parseList(parser, XContentUtils::parseFloat);
var embeddingValues = InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); var embeddingValues = TextEmbeddingFloatResults.Embedding.of(embeddingValuesList);
return List.of(embeddingValues); return List.of(embeddingValues);
} }
private static List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> parseCohereEmbeddings(XContentParser parser) private static List<TextEmbeddingFloatResults.Embedding> parseCohereEmbeddings(XContentParser parser) throws IOException {
throws IOException {
/* /*
Cohere response: Cohere response:
{ {
@ -115,7 +111,7 @@ public class AmazonBedrockEmbeddingsResponse extends AmazonBedrockResponse {
*/ */
positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE);
List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddingList = parseList( List<TextEmbeddingFloatResults.Embedding> embeddingList = parseList(
parser, parser,
AmazonBedrockEmbeddingsResponse::parseCohereEmbeddingsListItem AmazonBedrockEmbeddingsResponse::parseCohereEmbeddingsListItem
); );
@ -123,10 +119,9 @@ public class AmazonBedrockEmbeddingsResponse extends AmazonBedrockResponse {
return embeddingList; return embeddingList;
} }
private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseCohereEmbeddingsListItem(XContentParser parser) private static TextEmbeddingFloatResults.Embedding parseCohereEmbeddingsListItem(XContentParser parser) throws IOException {
throws IOException {
List<Float> embeddingValuesList = parseList(parser, XContentUtils::parseFloat); List<Float> embeddingValuesList = parseList(parser, XContentUtils::parseFloat);
return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); return TextEmbeddingFloatResults.Embedding.of(embeddingValuesList);
} }
} }

View file

@ -17,10 +17,9 @@ import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.core.inference.results.InferenceByteEmbedding; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingBitResults;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingBitResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults;
import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.HttpResult;
import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.request.Request;
import org.elasticsearch.xpack.inference.external.response.XContentUtils; import org.elasticsearch.xpack.inference.external.response.XContentUtils;
@ -192,20 +191,20 @@ public class CohereEmbeddingsResponseEntity {
// Cohere returns array of binary embeddings encoded as bytes with int8 precision so we can reuse the byte parser // Cohere returns array of binary embeddings encoded as bytes with int8 precision so we can reuse the byte parser
var embeddingList = parseList(parser, CohereEmbeddingsResponseEntity::parseByteArrayEntry); var embeddingList = parseList(parser, CohereEmbeddingsResponseEntity::parseByteArrayEntry);
return new InferenceTextEmbeddingBitResults(embeddingList); return new TextEmbeddingBitResults(embeddingList);
} }
private static InferenceServiceResults parseByteEmbeddingsArray(XContentParser parser) throws IOException { private static InferenceServiceResults parseByteEmbeddingsArray(XContentParser parser) throws IOException {
var embeddingList = parseList(parser, CohereEmbeddingsResponseEntity::parseByteArrayEntry); var embeddingList = parseList(parser, CohereEmbeddingsResponseEntity::parseByteArrayEntry);
return new InferenceTextEmbeddingByteResults(embeddingList); return new TextEmbeddingByteResults(embeddingList);
} }
private static InferenceByteEmbedding parseByteArrayEntry(XContentParser parser) throws IOException { private static TextEmbeddingByteResults.Embedding parseByteArrayEntry(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
List<Byte> embeddingValuesList = parseList(parser, CohereEmbeddingsResponseEntity::parseEmbeddingInt8Entry); List<Byte> embeddingValuesList = parseList(parser, CohereEmbeddingsResponseEntity::parseEmbeddingInt8Entry);
return InferenceByteEmbedding.of(embeddingValuesList); return TextEmbeddingByteResults.Embedding.of(embeddingValuesList);
} }
private static Byte parseEmbeddingInt8Entry(XContentParser parser) throws IOException { private static Byte parseEmbeddingInt8Entry(XContentParser parser) throws IOException {
@ -226,14 +225,13 @@ public class CohereEmbeddingsResponseEntity {
private static InferenceServiceResults parseFloatEmbeddingsArray(XContentParser parser) throws IOException { private static InferenceServiceResults parseFloatEmbeddingsArray(XContentParser parser) throws IOException {
var embeddingList = parseList(parser, CohereEmbeddingsResponseEntity::parseFloatArrayEntry); var embeddingList = parseList(parser, CohereEmbeddingsResponseEntity::parseFloatArrayEntry);
return new InferenceTextEmbeddingFloatResults(embeddingList); return new TextEmbeddingFloatResults(embeddingList);
} }
private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseFloatArrayEntry(XContentParser parser) private static TextEmbeddingFloatResults.Embedding parseFloatArrayEntry(XContentParser parser) throws IOException {
throws IOException {
ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
List<Float> embeddingValuesList = parseList(parser, XContentUtils::parseFloat); List<Float> embeddingValuesList = parseList(parser, XContentUtils::parseFloat);
return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); return TextEmbeddingFloatResults.Embedding.of(embeddingValuesList);
} }
private CohereEmbeddingsResponseEntity() {} private CohereEmbeddingsResponseEntity() {}

View file

@ -12,7 +12,7 @@ import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.HttpResult;
import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.request.Request;
import org.elasticsearch.xpack.inference.external.response.XContentUtils; import org.elasticsearch.xpack.inference.external.response.XContentUtils;
@ -70,7 +70,7 @@ public class GoogleAiStudioEmbeddingsResponseEntity {
* </pre> * </pre>
*/ */
public static InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { public static TextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException {
var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) {
@ -81,17 +81,16 @@ public class GoogleAiStudioEmbeddingsResponseEntity {
positionParserAtTokenAfterField(jsonParser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(jsonParser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE);
List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddingList = parseList( List<TextEmbeddingFloatResults.Embedding> embeddingList = parseList(
jsonParser, jsonParser,
GoogleAiStudioEmbeddingsResponseEntity::parseEmbeddingObject GoogleAiStudioEmbeddingsResponseEntity::parseEmbeddingObject
); );
return new InferenceTextEmbeddingFloatResults(embeddingList); return new TextEmbeddingFloatResults(embeddingList);
} }
} }
private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingObject(XContentParser parser) private static TextEmbeddingFloatResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException {
throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
positionParserAtTokenAfterField(parser, "values", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(parser, "values", FAILED_TO_FIND_FIELD_TEMPLATE);
@ -100,7 +99,7 @@ public class GoogleAiStudioEmbeddingsResponseEntity {
// parse and discard the rest of the object // parse and discard the rest of the object
consumeUntilObjectEnd(parser); consumeUntilObjectEnd(parser);
return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); return TextEmbeddingFloatResults.Embedding.of(embeddingValuesList);
} }
private GoogleAiStudioEmbeddingsResponseEntity() {} private GoogleAiStudioEmbeddingsResponseEntity() {}

View file

@ -13,7 +13,7 @@ import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.HttpResult;
import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.request.Request;
@ -64,7 +64,7 @@ public class GoogleVertexAiEmbeddingsResponseEntity {
* </pre> * </pre>
*/ */
public static InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { public static TextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException {
var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) {
@ -75,17 +75,16 @@ public class GoogleVertexAiEmbeddingsResponseEntity {
positionParserAtTokenAfterField(jsonParser, "predictions", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(jsonParser, "predictions", FAILED_TO_FIND_FIELD_TEMPLATE);
List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddingList = parseList( List<TextEmbeddingFloatResults.Embedding> embeddingList = parseList(
jsonParser, jsonParser,
GoogleVertexAiEmbeddingsResponseEntity::parseEmbeddingObject GoogleVertexAiEmbeddingsResponseEntity::parseEmbeddingObject
); );
return new InferenceTextEmbeddingFloatResults(embeddingList); return new TextEmbeddingFloatResults(embeddingList);
} }
} }
private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingObject(XContentParser parser) private static TextEmbeddingFloatResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException {
throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE);
@ -100,7 +99,7 @@ public class GoogleVertexAiEmbeddingsResponseEntity {
consumeUntilObjectEnd(parser); consumeUntilObjectEnd(parser);
consumeUntilObjectEnd(parser); consumeUntilObjectEnd(parser);
return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValueList); return TextEmbeddingFloatResults.Embedding.of(embeddingValueList);
} }
private static float parseEmbeddingList(XContentParser parser) throws IOException { private static float parseEmbeddingList(XContentParser parser) throws IOException {

View file

@ -14,7 +14,7 @@ import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.HttpResult;
import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.request.Request;
import org.elasticsearch.xpack.inference.external.response.XContentUtils; import org.elasticsearch.xpack.inference.external.response.XContentUtils;
@ -35,7 +35,7 @@ public class HuggingFaceEmbeddingsResponseEntity {
* Parse the response from hugging face. The known formats are an array of arrays and object with an {@code embeddings} field containing * Parse the response from hugging face. The known formats are an array of arrays and object with an {@code embeddings} field containing
* an array of arrays. * an array of arrays.
*/ */
public static InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { public static TextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException {
var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) {
@ -93,13 +93,13 @@ public class HuggingFaceEmbeddingsResponseEntity {
* <a href="https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2">sentence-transformers/all-MiniLM-L6-v2</a> * <a href="https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2">sentence-transformers/all-MiniLM-L6-v2</a>
* <a href="https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2">sentence-transformers/all-MiniLM-L12-v2</a> * <a href="https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2">sentence-transformers/all-MiniLM-L12-v2</a>
*/ */
private static InferenceTextEmbeddingFloatResults parseArrayFormat(XContentParser parser) throws IOException { private static TextEmbeddingFloatResults parseArrayFormat(XContentParser parser) throws IOException {
List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddingList = parseList( List<TextEmbeddingFloatResults.Embedding> embeddingList = parseList(
parser, parser,
HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry
); );
return new InferenceTextEmbeddingFloatResults(embeddingList); return new TextEmbeddingFloatResults(embeddingList);
} }
/** /**
@ -138,23 +138,22 @@ public class HuggingFaceEmbeddingsResponseEntity {
* <a href="https://huggingface.co/intfloat/multilingual-e5-small">intfloat/multilingual-e5-small</a> * <a href="https://huggingface.co/intfloat/multilingual-e5-small">intfloat/multilingual-e5-small</a>
* <a href="https://huggingface.co/sentence-transformers/all-mpnet-base-v2">sentence-transformers/all-mpnet-base-v2</a> * <a href="https://huggingface.co/sentence-transformers/all-mpnet-base-v2">sentence-transformers/all-mpnet-base-v2</a>
*/ */
private static InferenceTextEmbeddingFloatResults parseObjectFormat(XContentParser parser) throws IOException { private static TextEmbeddingFloatResults parseObjectFormat(XContentParser parser) throws IOException {
positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE);
List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddingList = parseList( List<TextEmbeddingFloatResults.Embedding> embeddingList = parseList(
parser, parser,
HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry
); );
return new InferenceTextEmbeddingFloatResults(embeddingList); return new TextEmbeddingFloatResults(embeddingList);
} }
private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingEntry(XContentParser parser) private static TextEmbeddingFloatResults.Embedding parseEmbeddingEntry(XContentParser parser) throws IOException {
throws IOException {
ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
List<Float> embeddingValuesList = parseList(parser, XContentUtils::parseFloat); List<Float> embeddingValuesList = parseList(parser, XContentUtils::parseFloat);
return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); return TextEmbeddingFloatResults.Embedding.of(embeddingValuesList);
} }
private HuggingFaceEmbeddingsResponseEntity() {} private HuggingFaceEmbeddingsResponseEntity() {}
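As the Javadoc in this file notes, Hugging Face embedding endpoints are known to respond in two shapes: a bare array of embedding arrays, or an object whose embeddings field holds that same array of arrays. The following is a rough, self-contained illustration of the two documented shapes with made-up values; the character-based dispatch is a deliberate simplification, since the real entity dispatches on the first XContent token rather than on raw text.

public class HuggingFaceResponseShapes {
    public static void main(String[] args) {
        // Shape 1: a bare array of embedding arrays (values are made up).
        String arrayFormat = "[[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]";
        // Shape 2: an object with an "embeddings" field holding the array of arrays.
        String objectFormat = "{\"embeddings\": [[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]}";

        for (String body : new String[] { arrayFormat, objectFormat }) {
            // Crude stand-in for the token-based dispatch in the response entity.
            String format = body.trim().startsWith("[") ? "array format" : "object format";
            System.out.println(format + ": " + body);
        }
    }
}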

View file

@ -12,7 +12,7 @@ import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.HttpResult;
import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.request.Request;
import org.elasticsearch.xpack.inference.external.response.XContentUtils; import org.elasticsearch.xpack.inference.external.response.XContentUtils;
@ -30,7 +30,7 @@ public class IbmWatsonxEmbeddingsResponseEntity {
private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in IBM Watsonx embeddings response"; private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in IBM Watsonx embeddings response";
public static InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { public static TextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException {
var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) {
@ -41,17 +41,16 @@ public class IbmWatsonxEmbeddingsResponseEntity {
positionParserAtTokenAfterField(jsonParser, "results", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(jsonParser, "results", FAILED_TO_FIND_FIELD_TEMPLATE);
List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddingList = parseList( List<TextEmbeddingFloatResults.Embedding> embeddingList = parseList(
jsonParser, jsonParser,
IbmWatsonxEmbeddingsResponseEntity::parseEmbeddingObject IbmWatsonxEmbeddingsResponseEntity::parseEmbeddingObject
); );
return new InferenceTextEmbeddingFloatResults(embeddingList); return new TextEmbeddingFloatResults(embeddingList);
} }
} }
private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingObject(XContentParser parser) private static TextEmbeddingFloatResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException {
throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE);
@ -60,7 +59,7 @@ public class IbmWatsonxEmbeddingsResponseEntity {
// parse and discard the rest of the object // parse and discard the rest of the object
consumeUntilObjectEnd(parser); consumeUntilObjectEnd(parser);
return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); return TextEmbeddingFloatResults.Embedding.of(embeddingValuesList);
} }
private IbmWatsonxEmbeddingsResponseEntity() {} private IbmWatsonxEmbeddingsResponseEntity() {}

View file

@ -14,7 +14,7 @@ import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.HttpResult;
import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.request.Request;
import org.elasticsearch.xpack.inference.external.response.XContentUtils; import org.elasticsearch.xpack.inference.external.response.XContentUtils;
@ -73,7 +73,7 @@ public class JinaAIEmbeddingsResponseEntity {
* </code> * </code>
* </pre> * </pre>
*/ */
public static InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { public static TextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException {
var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) {
@ -84,17 +84,16 @@ public class JinaAIEmbeddingsResponseEntity {
positionParserAtTokenAfterField(jsonParser, "data", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(jsonParser, "data", FAILED_TO_FIND_FIELD_TEMPLATE);
List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddingList = parseList( List<TextEmbeddingFloatResults.Embedding> embeddingList = parseList(
jsonParser, jsonParser,
JinaAIEmbeddingsResponseEntity::parseEmbeddingObject JinaAIEmbeddingsResponseEntity::parseEmbeddingObject
); );
return new InferenceTextEmbeddingFloatResults(embeddingList); return new TextEmbeddingFloatResults(embeddingList);
} }
} }
private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingObject(XContentParser parser) private static TextEmbeddingFloatResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException {
throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE);
@ -103,7 +102,7 @@ public class JinaAIEmbeddingsResponseEntity {
// parse and discard the rest of the object // parse and discard the rest of the object
consumeUntilObjectEnd(parser); consumeUntilObjectEnd(parser);
return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); return TextEmbeddingFloatResults.Embedding.of(embeddingValuesList);
} }
private JinaAIEmbeddingsResponseEntity() {} private JinaAIEmbeddingsResponseEntity() {}

View file

@ -14,7 +14,7 @@ import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.HttpResult;
import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.request.Request;
import org.elasticsearch.xpack.inference.external.response.XContentUtils; import org.elasticsearch.xpack.inference.external.response.XContentUtils;
@ -74,7 +74,7 @@ public class OpenAiEmbeddingsResponseEntity {
* </code> * </code>
* </pre> * </pre>
*/ */
public static InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { public static TextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException {
var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) {
@ -85,17 +85,16 @@ public class OpenAiEmbeddingsResponseEntity {
positionParserAtTokenAfterField(jsonParser, "data", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(jsonParser, "data", FAILED_TO_FIND_FIELD_TEMPLATE);
List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddingList = parseList( List<TextEmbeddingFloatResults.Embedding> embeddingList = parseList(
jsonParser, jsonParser,
OpenAiEmbeddingsResponseEntity::parseEmbeddingObject OpenAiEmbeddingsResponseEntity::parseEmbeddingObject
); );
return new InferenceTextEmbeddingFloatResults(embeddingList); return new TextEmbeddingFloatResults(embeddingList);
} }
} }
private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingObject(XContentParser parser) private static TextEmbeddingFloatResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException {
throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE); positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE);
@ -104,7 +103,7 @@ public class OpenAiEmbeddingsResponseEntity {
// parse and discard the rest of the object // parse and discard the rest of the object
consumeUntilObjectEnd(parser); consumeUntilObjectEnd(parser);
return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); return TextEmbeddingFloatResults.Embedding.of(embeddingValuesList);
} }
private OpenAiEmbeddingsResponseEntity() {} private OpenAiEmbeddingsResponseEntity() {}

View file

@ -17,10 +17,9 @@ import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.core.inference.results.InferenceByteEmbedding; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingBitResults;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingBitResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults;
import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.HttpResult;
import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.request.Request;
import org.elasticsearch.xpack.inference.external.request.voyageai.VoyageAIEmbeddingsRequest; import org.elasticsearch.xpack.inference.external.request.voyageai.VoyageAIEmbeddingsRequest;
@ -78,9 +77,9 @@ public class VoyageAIEmbeddingsResponseEntity {
} }
} }
public InferenceByteEmbedding toInferenceByteEmbedding() { public TextEmbeddingByteResults.Embedding toInferenceByteEmbedding() {
embedding.forEach(EmbeddingInt8ResultEntry::checkByteBounds); embedding.forEach(EmbeddingInt8ResultEntry::checkByteBounds);
return InferenceByteEmbedding.of(embedding.stream().map(Integer::byteValue).toList()); return TextEmbeddingByteResults.Embedding.of(embedding.stream().map(Integer::byteValue).toList());
} }
} }
@ -111,8 +110,8 @@ public class VoyageAIEmbeddingsResponseEntity {
PARSER.declareFloatArray(constructorArg(), new ParseField("embedding")); PARSER.declareFloatArray(constructorArg(), new ParseField("embedding"));
} }
public InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding toInferenceFloatEmbedding() { public TextEmbeddingFloatResults.Embedding toInferenceFloatEmbedding() {
return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embedding); return TextEmbeddingFloatResults.Embedding.of(embedding);
} }
} }
@ -169,22 +168,22 @@ public class VoyageAIEmbeddingsResponseEntity {
if (embeddingType == null || embeddingType == VoyageAIEmbeddingType.FLOAT) { if (embeddingType == null || embeddingType == VoyageAIEmbeddingType.FLOAT) {
var embeddingResult = EmbeddingFloatResult.PARSER.apply(jsonParser, null); var embeddingResult = EmbeddingFloatResult.PARSER.apply(jsonParser, null);
List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddingList = embeddingResult.entries.stream() List<TextEmbeddingFloatResults.Embedding> embeddingList = embeddingResult.entries.stream()
.map(EmbeddingFloatResultEntry::toInferenceFloatEmbedding) .map(EmbeddingFloatResultEntry::toInferenceFloatEmbedding)
.toList(); .toList();
return new InferenceTextEmbeddingFloatResults(embeddingList); return new TextEmbeddingFloatResults(embeddingList);
} else if (embeddingType == VoyageAIEmbeddingType.INT8) { } else if (embeddingType == VoyageAIEmbeddingType.INT8) {
var embeddingResult = EmbeddingInt8Result.PARSER.apply(jsonParser, null); var embeddingResult = EmbeddingInt8Result.PARSER.apply(jsonParser, null);
List<InferenceByteEmbedding> embeddingList = embeddingResult.entries.stream() List<TextEmbeddingByteResults.Embedding> embeddingList = embeddingResult.entries.stream()
.map(EmbeddingInt8ResultEntry::toInferenceByteEmbedding) .map(EmbeddingInt8ResultEntry::toInferenceByteEmbedding)
.toList(); .toList();
return new InferenceTextEmbeddingByteResults(embeddingList); return new TextEmbeddingByteResults(embeddingList);
} else if (embeddingType == VoyageAIEmbeddingType.BIT || embeddingType == VoyageAIEmbeddingType.BINARY) { } else if (embeddingType == VoyageAIEmbeddingType.BIT || embeddingType == VoyageAIEmbeddingType.BINARY) {
var embeddingResult = EmbeddingInt8Result.PARSER.apply(jsonParser, null); var embeddingResult = EmbeddingInt8Result.PARSER.apply(jsonParser, null);
List<InferenceByteEmbedding> embeddingList = embeddingResult.entries.stream() List<TextEmbeddingByteResults.Embedding> embeddingList = embeddingResult.entries.stream()
.map(EmbeddingInt8ResultEntry::toInferenceByteEmbedding) .map(EmbeddingInt8ResultEntry::toInferenceByteEmbedding)
.toList(); .toList();
return new InferenceTextEmbeddingBitResults(embeddingList); return new TextEmbeddingBitResults(embeddingList);
} else { } else {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"Illegal embedding_type value: " + embeddingType + ". Supported types are: " + VALID_EMBEDDING_TYPES_STRING "Illegal embedding_type value: " + embeddingType + ". Supported types are: " + VALID_EMBEDDING_TYPES_STRING

View file

@ -22,8 +22,8 @@ import org.elasticsearch.inference.SimilarityMeasure;
import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.TaskType;
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.action.InferenceAction;
import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.core.inference.results.TextEmbedding; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults;
import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings;
import org.elasticsearch.xpack.inference.services.settings.ApiKeySecrets; import org.elasticsearch.xpack.inference.services.settings.ApiKeySecrets;
@ -741,7 +741,7 @@ public final class ServiceUtils {
InputType.INGEST, InputType.INGEST,
InferenceAction.Request.DEFAULT_TIMEOUT, InferenceAction.Request.DEFAULT_TIMEOUT,
listener.delegateFailureAndWrap((delegate, r) -> { listener.delegateFailureAndWrap((delegate, r) -> {
if (r instanceof TextEmbedding embeddingResults) { if (r instanceof TextEmbeddingResults<?, ?> embeddingResults) {
try { try {
delegate.onResponse(embeddingResults.getFirstEmbeddingSize()); delegate.onResponse(embeddingResults.getFirstEmbeddingSize());
} catch (Exception e) { } catch (Exception e) {
@ -754,7 +754,7 @@ public final class ServiceUtils {
new ElasticsearchStatusException( new ElasticsearchStatusException(
"Could not determine embedding size. " "Could not determine embedding size. "
+ "Expected a result of type [" + "Expected a result of type ["
+ InferenceTextEmbeddingFloatResults.NAME + TextEmbeddingFloatResults.NAME
+ "] got [" + "] got ["
+ r.getWriteableName() + r.getWriteableName()
+ "]", + "]",

View file

@@ -308,7 +308,6 @@ public class AlibabaCloudSearchService extends SenderService {
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
inputs.getInputs(),
EMBEDDING_MAX_BATCH_SIZE,
- getEmbeddingTypeFromTaskType(alibabaCloudSearchModel.getTaskType()),
alibabaCloudSearchModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);
@@ -318,14 +317,6 @@ public class AlibabaCloudSearchService extends SenderService {
}
}
- private EmbeddingRequestChunker.EmbeddingType getEmbeddingTypeFromTaskType(TaskType taskType) {
- return switch (taskType) {
- case TEXT_EMBEDDING -> EmbeddingRequestChunker.EmbeddingType.FLOAT;
- case SPARSE_EMBEDDING -> EmbeddingRequestChunker.EmbeddingType.SPARSE;
- default -> throw new IllegalArgumentException("Unsupported task type for chunking: " + taskType);
- };
- }
/**
* For text embedding models get the embedding size and
* update the service settings.
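In this and the following service diffs, the EmbeddingRequestChunker is constructed without an explicit EmbeddingRequestChunker.EmbeddingType argument, so per-service helpers such as getEmbeddingTypeFromTaskType above are deleted. As a minimal sketch of the resulting call shape (a fragment taken from the post-change side of the hunk, not a standalone program; inputs, listener, EMBEDDING_MAX_BATCH_SIZE and the model variable come from the enclosing service class):

    List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
        inputs.getInputs(),                                                // documents to chunk and batch
        EMBEDDING_MAX_BATCH_SIZE,                                          // maximum inputs per request
        alibabaCloudSearchModel.getConfigurations().getChunkingSettings()  // embedding-type argument no longer passed
    ).batchRequestsWithListeners(listener);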

View file

@@ -132,7 +132,6 @@ public class AmazonBedrockService extends SenderService {
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
inputs.getInputs(),
maxBatchSize,
- EmbeddingRequestChunker.EmbeddingType.FLOAT,
baseAmazonBedrockModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);

View file

@@ -124,7 +124,6 @@ public class AzureAiStudioService extends SenderService {
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
inputs.getInputs(),
EMBEDDING_MAX_BATCH_SIZE,
- EmbeddingRequestChunker.EmbeddingType.FLOAT,
baseAzureAiStudioModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);

View file

@@ -284,7 +284,6 @@ public class AzureOpenAiService extends SenderService {
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
inputs.getInputs(),
EMBEDDING_MAX_BATCH_SIZE,
- EmbeddingRequestChunker.EmbeddingType.FLOAT,
azureOpenAiModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);

View file

@@ -286,7 +286,6 @@ public class CohereService extends SenderService {
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
inputs.getInputs(),
EMBEDDING_MAX_BATCH_SIZE,
- EmbeddingRequestChunker.EmbeddingType.fromDenseVectorElementType(model.getServiceSettings().elementType()),
cohereModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);

View file

@@ -33,7 +33,7 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.xpack.core.inference.action.InferenceAction;
- import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse;
+ import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbedding;
import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError;
import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults;
import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults;
@@ -578,7 +578,7 @@ public class ElasticInferenceService extends SenderService {
private static List<ChunkedInference> translateToChunkedResults(InferenceInputs inputs, InferenceServiceResults inferenceResults) {
if (inferenceResults instanceof SparseEmbeddingResults sparseEmbeddingResults) {
var inputsAsList = DocumentsOnlyInput.of(inputs).getInputs();
- return ChunkedInferenceEmbeddingSparse.listOf(inputsAsList, sparseEmbeddingResults);
+ return ChunkedInferenceEmbedding.listOf(inputsAsList, sparseEmbeddingResults);
} else if (inferenceResults instanceof ErrorInferenceResults error) {
return List.of(new ChunkedInferenceError(error.getException()));
} else {

View file

@@ -35,9 +35,9 @@ import org.elasticsearch.inference.UnifiedCompletionRequest;
import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.core.XPackSettings;
- import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults;
import org.elasticsearch.xpack.core.inference.results.RankedDocsResults;
import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults;
+ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction;
import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction;
import org.elasticsearch.xpack.core.ml.action.InferModelAction;
@@ -635,7 +635,7 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi
);
ActionListener<InferModelAction.Response> mlResultsListener = listener.delegateFailureAndWrap(
- (l, inferenceResult) -> l.onResponse(InferenceTextEmbeddingFloatResults.of(inferenceResult.getInferenceResults()))
+ (l, inferenceResult) -> l.onResponse(TextEmbeddingFloatResults.of(inferenceResult.getInferenceResults()))
);
var maybeDeployListener = mlResultsListener.delegateResponse(
@@ -728,7 +728,6 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
input,
EMBEDDING_MAX_BATCH_SIZE,
- embeddingTypeFromTaskTypeAndSettings(model.getTaskType(), esModel.internalServiceSettings),
esModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);
@@ -751,13 +750,11 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi
ActionListener<InferenceServiceResults> chunkPartListener
) {
if (taskType == TaskType.TEXT_EMBEDDING) {
- var translated = new ArrayList<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding>();
+ var translated = new ArrayList<TextEmbeddingFloatResults.Embedding>();
for (var inferenceResult : inferenceResults) {
if (inferenceResult instanceof MlTextEmbeddingResults mlTextEmbeddingResult) {
- translated.add(
- new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(mlTextEmbeddingResult.getInferenceAsFloat())
- );
+ translated.add(new TextEmbeddingFloatResults.Embedding(mlTextEmbeddingResult.getInferenceAsFloat()));
} else if (inferenceResult instanceof ErrorInferenceResults error) {
chunkPartListener.onFailure(error.getException());
return;
@@ -768,7 +765,7 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi
return;
}
}
- chunkPartListener.onResponse(new InferenceTextEmbeddingFloatResults(translated));
+ chunkPartListener.onResponse(new TextEmbeddingFloatResults(translated));
} else { // sparse
var translated = new ArrayList<SparseEmbeddingResults.Embedding>();
@@ -946,23 +943,6 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi
return DEFAULT_ELSER_ID.equals(inferenceId) || DEFAULT_E5_ID.equals(inferenceId) || DEFAULT_RERANK_ID.equals(inferenceId);
}
- static EmbeddingRequestChunker.EmbeddingType embeddingTypeFromTaskTypeAndSettings(
- TaskType taskType,
- ElasticsearchInternalServiceSettings serviceSettings
- ) {
- return switch (taskType) {
- case SPARSE_EMBEDDING -> EmbeddingRequestChunker.EmbeddingType.SPARSE;
- case TEXT_EMBEDDING -> serviceSettings.elementType() == null
- ? EmbeddingRequestChunker.EmbeddingType.FLOAT
- : EmbeddingRequestChunker.EmbeddingType.fromDenseVectorElementType(serviceSettings.elementType());
- default -> throw new ElasticsearchStatusException(
- "Chunking is not supported for task type [{}]",
- RestStatus.BAD_REQUEST,
- taskType
- );
- };
- }
private void validateAgainstDeployment(
String modelId,
String deploymentId,
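The float-embedding result types are renamed in the same pass (InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding becomes TextEmbeddingFloatResults.Embedding). As a condensed, illustrative sketch of the per-chunk translation after this change, using only names visible in the hunks above (inferenceResults and chunkPartListener are the method's parameters; the error branches shown in the hunk are omitted here):

    var translated = new ArrayList<TextEmbeddingFloatResults.Embedding>();
    for (var inferenceResult : inferenceResults) {
        if (inferenceResult instanceof MlTextEmbeddingResults mlTextEmbeddingResult) {
            // one Embedding record per ML text-embedding result
            translated.add(new TextEmbeddingFloatResults.Embedding(mlTextEmbeddingResult.getInferenceAsFloat()));
        }
    }
    chunkPartListener.onResponse(new TextEmbeddingFloatResults(translated));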

View file

@@ -331,7 +331,6 @@ public class GoogleAiStudioService extends SenderService {
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
inputs.getInputs(),
EMBEDDING_MAX_BATCH_SIZE,
- EmbeddingRequestChunker.EmbeddingType.FLOAT,
googleAiStudioModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);

View file

@@ -231,7 +231,6 @@ public class GoogleVertexAiService extends SenderService {
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
inputs.getInputs(),
EMBEDDING_MAX_BATCH_SIZE,
- EmbeddingRequestChunker.EmbeddingType.FLOAT,
googleVertexAiModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);

View file

@@ -130,7 +130,6 @@ public class HuggingFaceService extends HuggingFaceBaseService {
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
inputs.getInputs(),
EMBEDDING_MAX_BATCH_SIZE,
- EmbeddingRequestChunker.EmbeddingType.FLOAT,
huggingFaceModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);

View file

@@ -25,11 +25,10 @@ import org.elasticsearch.inference.SettingsConfiguration;
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType;
import org.elasticsearch.rest.RestStatus;
- import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingFloat;
+ import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbedding;
- import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse;
import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError;
- import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults;
import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults;
+ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults;
import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults;
import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput;
import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender;
@@ -111,16 +110,16 @@ public class HuggingFaceElserService extends HuggingFaceBaseService {
}
private static List<ChunkedInference> translateToChunkedResults(DocumentsOnlyInput inputs, InferenceServiceResults inferenceResults) {
- if (inferenceResults instanceof InferenceTextEmbeddingFloatResults textEmbeddingResults) {
+ if (inferenceResults instanceof TextEmbeddingFloatResults textEmbeddingResults) {
validateInputSizeAgainstEmbeddings(inputs.getInputs(), textEmbeddingResults.embeddings().size());
var results = new ArrayList<ChunkedInference>(inputs.getInputs().size());
for (int i = 0; i < inputs.getInputs().size(); i++) {
results.add(
- new ChunkedInferenceEmbeddingFloat(
+ new ChunkedInferenceEmbedding(
List.of(
- new ChunkedInferenceEmbeddingFloat.FloatEmbeddingChunk(
+ new TextEmbeddingFloatResults.Chunk(
textEmbeddingResults.embeddings().get(i).values(),
inputs.getInputs().get(i),
new ChunkedInference.TextOffset(0, inputs.getInputs().get(i).length())
@@ -132,13 +131,13 @@ public class HuggingFaceElserService extends HuggingFaceBaseService {
return results;
} else if (inferenceResults instanceof SparseEmbeddingResults sparseEmbeddingResults) {
var inputsAsList = DocumentsOnlyInput.of(inputs).getInputs();
- return ChunkedInferenceEmbeddingSparse.listOf(inputsAsList, sparseEmbeddingResults);
+ return ChunkedInferenceEmbedding.listOf(inputsAsList, sparseEmbeddingResults);
} else if (inferenceResults instanceof ErrorInferenceResults error) {
return List.of(new ChunkedInferenceError(error.getException()));
} else {
String expectedClasses = Strings.format(
"One of [%s,%s]",
- InferenceTextEmbeddingFloatResults.class.getSimpleName(),
+ TextEmbeddingFloatResults.class.getSimpleName(),
SparseEmbeddingResults.class.getSimpleName()
);
throw createInvalidChunkedResultException(expectedClasses, inferenceResults.getWriteableName());
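Across these hunks the per-element chunked result classes (ChunkedInferenceEmbeddingFloat, ChunkedInferenceEmbeddingSparse) are collapsed into a single ChunkedInferenceEmbedding, with the chunk type now nested in the results classes. A minimal sketch of the new construction for one float-embedding input, taken from the post-change code above (a fragment: inputs, textEmbeddingResults and i are the variables visible in the hunk):

    var text = inputs.getInputs().get(i);
    var values = textEmbeddingResults.embeddings().get(i).values();
    ChunkedInference chunked = new ChunkedInferenceEmbedding(
        List.of(new TextEmbeddingFloatResults.Chunk(
            values,                                            // the embedding for this input
            text,                                              // the chunk text
            new ChunkedInference.TextOffset(0, text.length())  // offset spans the whole input
        ))
    );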

View file

@@ -310,7 +310,6 @@ public class IbmWatsonxService extends SenderService {
var batchedRequests = new EmbeddingRequestChunker(
input.getInputs(),
EMBEDDING_MAX_BATCH_SIZE,
- EmbeddingRequestChunker.EmbeddingType.FLOAT,
model.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);
for (var request : batchedRequests) {

View file

@@ -268,7 +268,6 @@ public class JinaAIService extends SenderService {
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
inputs.getInputs(),
EMBEDDING_MAX_BATCH_SIZE,
- EmbeddingRequestChunker.EmbeddingType.fromDenseVectorElementType(model.getServiceSettings().elementType()),
jinaaiModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);

View file

@@ -113,7 +113,6 @@ public class MistralService extends SenderService {
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
inputs.getInputs(),
MistralConstants.MAX_BATCH_SIZE,
- EmbeddingRequestChunker.EmbeddingType.FLOAT,
mistralEmbeddingsModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);

View file

@@ -322,7 +322,6 @@ public class OpenAiService extends SenderService {
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker(
inputs.getInputs(),
EMBEDDING_MAX_BATCH_SIZE,
- EmbeddingRequestChunker.EmbeddingType.FLOAT,
openAiModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);

Some files were not shown because too many files have changed in this diff.