diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java index cc662bd74757..c6421d76392c 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java @@ -74,7 +74,7 @@ class AddFileKeyStoreCommand extends BaseKeyStoreCommand { keyStore.setFile(setting, Files.readAllBytes(file)); } - keyStore.save(env.configFile(), getKeyStorePassword().getChars()); + keyStore.save(env.configDir(), getKeyStorePassword().getChars()); } @SuppressForbidden(reason = "file arg for cli") diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommand.java index c01c18418858..a7ea6dcf7ce7 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommand.java @@ -100,7 +100,7 @@ class AddStringKeyStoreCommand extends BaseKeyStoreCommand { } } - keyStore.save(env.configFile(), getKeyStorePassword().getChars()); + keyStore.save(env.configDir(), getKeyStorePassword().getChars()); } } diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/BaseKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/BaseKeyStoreCommand.java index 0380018d36cf..a8a75ac23c90 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/BaseKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/BaseKeyStoreCommand.java @@ -39,14 +39,14 @@ public abstract class BaseKeyStoreCommand extends KeyStoreAwareCommand { @Override public final void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { try { - final Path configFile = env.configFile(); + final Path configFile = env.configDir(); keyStore = KeyStoreWrapper.load(configFile); if (keyStore == null) { if (keyStoreMustExist) { throw new UserException( ExitCodes.DATA_ERROR, "Elasticsearch keystore not found at [" - + KeyStoreWrapper.keystorePath(env.configFile()) + + KeyStoreWrapper.keystorePath(env.configDir()) + "]. Use 'create' command to create one." 
); } else if (options.has(forceOption) == false) { diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/ChangeKeyStorePasswordCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/ChangeKeyStorePasswordCommand.java index 4dca3d538263..9e4f70eee559 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/ChangeKeyStorePasswordCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/ChangeKeyStorePasswordCommand.java @@ -31,7 +31,7 @@ class ChangeKeyStorePasswordCommand extends BaseKeyStoreCommand { protected void executeCommand(Terminal terminal, OptionSet options, Environment env) throws Exception { try (SecureString newPassword = readPassword(terminal, true)) { final KeyStoreWrapper keyStore = getKeyStore(); - keyStore.save(env.configFile(), newPassword.getChars()); + keyStore.save(env.configDir(), newPassword.getChars()); terminal.println("Elasticsearch keystore password changed successfully."); } catch (SecurityException e) { throw new UserException(ExitCodes.DATA_ERROR, e.getMessage()); diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommand.java index a922c92f5f44..ef561b08d9a5 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommand.java @@ -40,7 +40,7 @@ class CreateKeyStoreCommand extends KeyStoreAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { try (SecureString password = options.has(passwordOption) ? readPassword(terminal, true) : new SecureString(new char[0])) { - Path keystoreFile = KeyStoreWrapper.keystorePath(env.configFile()); + Path keystoreFile = KeyStoreWrapper.keystorePath(env.configDir()); if (Files.exists(keystoreFile)) { if (terminal.promptYesNo("An elasticsearch keystore already exists. 
Overwrite?", false) == false) { terminal.println("Exiting without creating keystore."); @@ -48,8 +48,8 @@ class CreateKeyStoreCommand extends KeyStoreAwareCommand { } } KeyStoreWrapper keystore = KeyStoreWrapper.create(); - keystore.save(env.configFile(), password.getChars()); - terminal.println("Created elasticsearch keystore in " + KeyStoreWrapper.keystorePath(env.configFile())); + keystore.save(env.configDir(), password.getChars()); + terminal.println("Created elasticsearch keystore in " + KeyStoreWrapper.keystorePath(env.configDir())); } catch (SecurityException e) { throw new UserException(ExitCodes.IO_ERROR, "Error creating the elasticsearch keystore."); } diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java index 0428d5dcf7df..f0eaca1648b9 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java @@ -32,7 +32,7 @@ public class HasPasswordKeyStoreCommand extends KeyStoreAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { - final Path configFile = env.configFile(); + final Path configFile = env.configDir(); final KeyStoreWrapper keyStore = KeyStoreWrapper.load(configFile); // We handle error printing here so we can respect the "--silent" flag diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java index 8a973c6d67f7..fb1a2ad1df7f 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java @@ -45,6 +45,6 @@ class RemoveSettingKeyStoreCommand extends BaseKeyStoreCommand { } keyStore.remove(setting); } - keyStore.save(env.configFile(), getKeyStorePassword().getChars()); + keyStore.save(env.configDir(), getKeyStorePassword().getChars()); } } diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommand.java index b7061d6153b8..bbbfbf81f7ed 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommand.java @@ -26,7 +26,7 @@ public class UpgradeKeyStoreCommand extends BaseKeyStoreCommand { @Override protected void executeCommand(final Terminal terminal, final OptionSet options, final Environment env) throws Exception { - KeyStoreWrapper.upgrade(getKeyStore(), env.configFile(), getKeyStorePassword().getChars()); + KeyStoreWrapper.upgrade(getKeyStore(), env.configDir(), getKeyStorePassword().getChars()); } } diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommandTests.java index edd70e4e52f5..56706dd44f0c 100644 --- 
a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommandTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommandTests.java @@ -46,14 +46,14 @@ public class AddFileKeyStoreCommandTests extends KeyStoreCommandTestCase { for (int i = 0; i < length; ++i) { bytes[i] = randomByte(); } - Path file = env.configFile().resolve(randomAlphaOfLength(16)); + Path file = env.configDir().resolve(randomAlphaOfLength(16)); Files.write(file, bytes); return file; } private void addFile(KeyStoreWrapper keystore, String setting, Path file, String password) throws Exception { keystore.setFile(setting, Files.readAllBytes(file)); - keystore.save(env.configFile(), password.toCharArray()); + keystore.save(env.configDir(), password.toCharArray()); } public void testMissingCreateWithEmptyPasswordWhenPrompted() throws Exception { @@ -77,7 +77,7 @@ public class AddFileKeyStoreCommandTests extends KeyStoreCommandTestCase { terminal.addSecretInput(randomFrom("", "keystorepassword")); terminal.addTextInput("n"); // explicit no execute("foo"); - assertNull(KeyStoreWrapper.load(env.configFile())); + assertNull(KeyStoreWrapper.load(env.configDir())); } public void testOverwritePromptDefault() throws Exception { diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommandTests.java index 3de18e094104..412624be1d50 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommandTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommandTests.java @@ -83,7 +83,7 @@ public class AddStringKeyStoreCommandTests extends KeyStoreCommandTestCase { public void testMissingNoCreate() throws Exception { terminal.addTextInput("n"); // explicit no execute("foo"); - assertNull(KeyStoreWrapper.load(env.configFile())); + assertNull(KeyStoreWrapper.load(env.configDir())); } public void testOverwritePromptDefault() throws Exception { @@ -143,7 +143,7 @@ public class AddStringKeyStoreCommandTests extends KeyStoreCommandTestCase { public void testPromptForValue() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); terminal.addSecretInput("secret value"); execute("foo"); @@ -152,7 +152,7 @@ public class AddStringKeyStoreCommandTests extends KeyStoreCommandTestCase { public void testPromptForMultipleValues() throws Exception { final String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); terminal.addSecretInput("bar1"); terminal.addSecretInput("bar2"); @@ -165,7 +165,7 @@ public class AddStringKeyStoreCommandTests extends KeyStoreCommandTestCase { public void testStdinShort() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("secret value 1"); execute("-x", "foo"); @@ -174,7 +174,7 @@ public class AddStringKeyStoreCommandTests 
extends KeyStoreCommandTestCase { public void testStdinLong() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("secret value 2"); execute("--stdin", "foo"); @@ -183,7 +183,7 @@ public class AddStringKeyStoreCommandTests extends KeyStoreCommandTestCase { public void testStdinNoInput() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput(""); execute("-x", "foo"); @@ -192,7 +192,7 @@ public class AddStringKeyStoreCommandTests extends KeyStoreCommandTestCase { public void testStdinInputWithLineBreaks() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("Typedthisandhitenter\n"); execute("-x", "foo"); @@ -201,7 +201,7 @@ public class AddStringKeyStoreCommandTests extends KeyStoreCommandTestCase { public void testStdinInputWithCarriageReturn() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("Typedthisandhitenter\r"); execute("-x", "foo"); @@ -210,7 +210,7 @@ public class AddStringKeyStoreCommandTests extends KeyStoreCommandTestCase { public void testStdinWithMultipleValues() throws Exception { final String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("bar1\nbar2\nbar3"); execute(randomFrom("-x", "--stdin"), "foo1", "foo2", "foo3"); @@ -221,7 +221,7 @@ public class AddStringKeyStoreCommandTests extends KeyStoreCommandTestCase { public void testAddUtf8String() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); final int stringSize = randomIntBetween(8, 16); try (CharArrayWriter secretChars = new CharArrayWriter(stringSize)) { diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/BootstrapTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/BootstrapTests.java index 0fc76943f9d0..d93bc2466ed7 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/BootstrapTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/BootstrapTests.java @@ -42,7 +42,7 @@ public class BootstrapTests extends ESTestCase { public void testLoadSecureSettings() throws Exception { final char[] password = KeyStoreWrapperTests.getPossibleKeystorePassword(); - final Path configPath = env.configFile(); + final Path configPath = env.configDir(); final SecureString seed; try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create()) { seed = KeyStoreWrapper.SEED_SETTING.get(Settings.builder().setSecureSettings(keyStoreWrapper).build()); diff 
--git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommandTests.java index 72a83a48b634..74b8c634939f 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommandTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommandTests.java @@ -48,7 +48,7 @@ public class CreateKeyStoreCommandTests extends KeyStoreCommandTestCase { public void testDefaultNotPromptForPassword() throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); execute(); - Path configDir = env.configFile(); + Path configDir = env.configDir(); assertNotNull(KeyStoreWrapper.load(configDir)); } @@ -63,7 +63,7 @@ public class CreateKeyStoreCommandTests extends KeyStoreCommandTestCase { } else { execute(); } - Path configDir = env.configFile(); + Path configDir = env.configDir(); assertNotNull(KeyStoreWrapper.load(configDir)); } @@ -79,13 +79,13 @@ public class CreateKeyStoreCommandTests extends KeyStoreCommandTestCase { } else { execute(); } - Path configDir = env.configFile(); + Path configDir = env.configDir(); assertNotNull(KeyStoreWrapper.load(configDir)); } public void testOverwrite() throws Exception { String password = getPossibleKeystorePassword(); - Path keystoreFile = KeyStoreWrapper.keystorePath(env.configFile()); + Path keystoreFile = KeyStoreWrapper.keystorePath(env.configDir()); byte[] content = "not a keystore".getBytes(StandardCharsets.UTF_8); Files.write(keystoreFile, content); @@ -110,6 +110,6 @@ public class CreateKeyStoreCommandTests extends KeyStoreCommandTestCase { } else { execute(); } - assertNotNull(KeyStoreWrapper.load(env.configFile())); + assertNotNull(KeyStoreWrapper.load(env.configDir())); } } diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreCommandTestCase.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreCommandTestCase.java index 80edce4a2079..fcbe7b222629 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreCommandTestCase.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreCommandTestCase.java @@ -77,11 +77,11 @@ public abstract class KeyStoreCommandTestCase extends CommandTestCase { } void saveKeystore(KeyStoreWrapper keystore, String password) throws Exception { - keystore.save(env.configFile(), password.toCharArray()); + keystore.save(env.configDir(), password.toCharArray()); } KeyStoreWrapper loadKeystore(String password) throws Exception { - KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configFile()); + KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configDir()); keystore.decrypt(password.toCharArray()); return keystore; } diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java index 5ab27bac3998..ee3a53d5c3df 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java @@ -84,8 +84,8 @@ public class KeyStoreWrapperTests extends ESTestCase { bytes[i] = (byte) i; } 
keystore.setFile("foo", bytes); - keystore.save(env.configFile(), password); - keystore = KeyStoreWrapper.load(env.configFile()); + keystore.save(env.configDir(), password); + keystore = KeyStoreWrapper.load(env.configDir()); keystore.decrypt(password); try (InputStream stream = keystore.getFile("foo")) { for (int i = 0; i < 256; ++i) { @@ -114,8 +114,8 @@ public class KeyStoreWrapperTests extends ESTestCase { invalidPassword[realPassword.length] = '#'; } KeyStoreWrapper keystore = KeyStoreWrapper.create(); - keystore.save(env.configFile(), realPassword); - final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configFile()); + keystore.save(env.configDir(), realPassword); + final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configDir()); final SecurityException exception = expectThrows(SecurityException.class, () -> loadedkeystore.decrypt(invalidPassword)); if (inFipsJvm()) { assertThat( @@ -133,8 +133,8 @@ public class KeyStoreWrapperTests extends ESTestCase { public void testDecryptKeyStoreWithShortPasswordInFips() throws Exception { assumeTrue("This should run only in FIPS mode", inFipsJvm()); KeyStoreWrapper keystore = KeyStoreWrapper.create(); - keystore.save(env.configFile(), "alongenoughpassword".toCharArray()); - final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configFile()); + keystore.save(env.configDir(), "alongenoughpassword".toCharArray()); + final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configDir()); final GeneralSecurityException exception = expectThrows( GeneralSecurityException.class, () -> loadedkeystore.decrypt("shortpwd".toCharArray()) // shorter than 14 characters @@ -147,7 +147,7 @@ public class KeyStoreWrapperTests extends ESTestCase { KeyStoreWrapper keystore = KeyStoreWrapper.create(); final GeneralSecurityException exception = expectThrows( GeneralSecurityException.class, - () -> keystore.save(env.configFile(), "shortpwd".toCharArray()) // shorter than 14 characters + () -> keystore.save(env.configDir(), "shortpwd".toCharArray()) // shorter than 14 characters ); assertThat(exception.getMessage(), containsString("Error generating an encryption key from the provided password")); } @@ -192,18 +192,18 @@ public class KeyStoreWrapperTests extends ESTestCase { final char[] password = getPossibleKeystorePassword(); KeyStoreWrapper keystore = KeyStoreWrapper.create(); SecureString seed = keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()); - keystore.save(env.configFile(), password); + keystore.save(env.configDir(), password); // upgrade does not overwrite seed - KeyStoreWrapper.upgrade(keystore, env.configFile(), password); + KeyStoreWrapper.upgrade(keystore, env.configDir(), password); assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString()); - keystore = KeyStoreWrapper.load(env.configFile()); + keystore = KeyStoreWrapper.load(env.configDir()); keystore.decrypt(password); assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString()); } public void testFailWhenCannotConsumeSecretStream() throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); - Path configDir = env.configFile(); + Path configDir = env.configDir(); try ( Directory directory = newFSDirectory(configDir); IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT) @@ -234,7 +234,7 @@ public class KeyStoreWrapperTests extends ESTestCase { public void 
testFailWhenCannotConsumeEncryptedBytesStream() throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); - Path configDir = env.configFile(); + Path configDir = env.configDir(); try ( Directory directory = newFSDirectory(configDir); IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT) @@ -266,7 +266,7 @@ public class KeyStoreWrapperTests extends ESTestCase { public void testFailWhenSecretStreamNotConsumed() throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); - Path configDir = env.configFile(); + Path configDir = env.configDir(); try ( Directory directory = newFSDirectory(configDir); IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT) @@ -296,7 +296,7 @@ public class KeyStoreWrapperTests extends ESTestCase { public void testFailWhenEncryptedBytesStreamIsNotConsumed() throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); - Path configDir = env.configFile(); + Path configDir = env.configDir(); try ( Directory directory = newFSDirectory(configDir); IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT) @@ -359,11 +359,11 @@ public class KeyStoreWrapperTests extends ESTestCase { final char[] password = getPossibleKeystorePassword(); KeyStoreWrapper keystore = KeyStoreWrapper.create(); keystore.remove(KeyStoreWrapper.SEED_SETTING.getKey()); - keystore.save(env.configFile(), password); - KeyStoreWrapper.upgrade(keystore, env.configFile(), password); + keystore.save(env.configDir(), password); + KeyStoreWrapper.upgrade(keystore, env.configDir(), password); SecureString seed = keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()); assertNotNull(seed); - keystore = KeyStoreWrapper.load(env.configFile()); + keystore = KeyStoreWrapper.load(env.configDir()); keystore.decrypt(password); assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString()); } @@ -380,7 +380,7 @@ public class KeyStoreWrapperTests extends ESTestCase { public void testBackcompatV4() throws Exception { assumeFalse("Can't run in a FIPS JVM as PBE is not available", inFipsJvm()); - Path configDir = env.configFile(); + Path configDir = env.configDir(); try ( Directory directory = newFSDirectory(configDir); IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT) @@ -421,10 +421,10 @@ public class KeyStoreWrapperTests extends ESTestCase { final Path temp = createTempDir(); Files.writeString(temp.resolve("file_setting"), "file_value", StandardCharsets.UTF_8); wrapper.setFile("file_setting", Files.readAllBytes(temp.resolve("file_setting"))); - wrapper.save(env.configFile(), password); + wrapper.save(env.configDir(), password); wrapper.close(); - final KeyStoreWrapper afterSave = KeyStoreWrapper.load(env.configFile()); + final KeyStoreWrapper afterSave = KeyStoreWrapper.load(env.configDir()); assertNotNull(afterSave); afterSave.decrypt(password); assertThat(afterSave.getSettingNames(), equalTo(Set.of("keystore.seed", "string_setting", "file_setting"))); @@ -510,8 +510,8 @@ public class KeyStoreWrapperTests extends ESTestCase { // testing with password and raw dataBytes[] final char[] password = getPossibleKeystorePassword(); - wrapper.save(env.configFile(), password); - final KeyStoreWrapper fromFile = 
KeyStoreWrapper.load(env.configFile()); + wrapper.save(env.configDir(), password); + final KeyStoreWrapper fromFile = KeyStoreWrapper.load(env.configDir()); fromFile.decrypt(password); assertThat(fromFile.getSettingNames(), hasSize(2)); diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java index bb533f32c7ac..894b9d215a47 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java @@ -62,11 +62,11 @@ public class UpgradeKeyStoreCommandTests extends KeyStoreCommandTestCase { } private void assertKeystoreUpgrade(String file, int version, @Nullable String password) throws Exception { - final Path keystore = KeyStoreWrapper.keystorePath(env.configFile()); + final Path keystore = KeyStoreWrapper.keystorePath(env.configDir()); try (InputStream is = KeyStoreWrapperTests.class.getResourceAsStream(file); OutputStream os = Files.newOutputStream(keystore)) { is.transferTo(os); } - try (KeyStoreWrapper beforeUpgrade = KeyStoreWrapper.load(env.configFile())) { + try (KeyStoreWrapper beforeUpgrade = KeyStoreWrapper.load(env.configDir())) { assertNotNull(beforeUpgrade); assertThat(beforeUpgrade.getFormatVersion(), equalTo(version)); } @@ -77,7 +77,7 @@ public class UpgradeKeyStoreCommandTests extends KeyStoreCommandTestCase { execute(); terminal.reset(); - try (KeyStoreWrapper afterUpgrade = KeyStoreWrapper.load(env.configFile())) { + try (KeyStoreWrapper afterUpgrade = KeyStoreWrapper.load(env.configDir())) { assertNotNull(afterUpgrade); assertThat(afterUpgrade.getFormatVersion(), equalTo(KeyStoreWrapper.CURRENT_VERSION)); afterUpgrade.decrypt(password != null ? 
password.toCharArray() : new char[0]); @@ -87,6 +87,6 @@ public class UpgradeKeyStoreCommandTests extends KeyStoreCommandTestCase { public void testKeystoreDoesNotExist() { final UserException e = expectThrows(UserException.class, this::execute); - assertThat(e, hasToString(containsString("keystore not found at [" + KeyStoreWrapper.keystorePath(env.configFile()) + "]"))); + assertThat(e, hasToString(containsString("keystore not found at [" + KeyStoreWrapper.keystorePath(env.configDir()) + "]"))); } } diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java index d443cf5e1e18..f4601d70a7f0 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java @@ -249,8 +249,8 @@ public class InstallPluginAction implements Closeable { final List<Path> deleteOnFailure = new ArrayList<>(); deleteOnFailures.put(pluginId, deleteOnFailure); - final Path pluginZip = download(plugin, env.tmpFile()); - final Path extractedZip = unzip(pluginZip, env.pluginsFile()); + final Path pluginZip = download(plugin, env.tmpDir()); + final Path extractedZip = unzip(pluginZip, env.pluginsDir()); deleteOnFailure.add(extractedZip); final PluginDescriptor pluginDescriptor = installPlugin(plugin, extractedZip, deleteOnFailure); terminal.println(logPrefix + "Installed " + pluginDescriptor.getName()); @@ -868,14 +868,14 @@ public class InstallPluginAction implements Closeable { PluginsUtils.verifyCompatibility(info); // checking for existing version of the plugin - verifyPluginName(env.pluginsFile(), info.getName()); + verifyPluginName(env.pluginsDir(), info.getName()); - PluginsUtils.checkForFailedPluginRemovals(env.pluginsFile()); + PluginsUtils.checkForFailedPluginRemovals(env.pluginsDir()); terminal.println(VERBOSE, info.toString()); // check for jar hell before any copying - jarHellCheck(info, pluginRoot, env.pluginsFile(), env.modulesFile()); + jarHellCheck(info, pluginRoot, env.pluginsDir(), env.modulesDir()); if (info.isStable() && hasNamedComponentFile(pluginRoot) == false) { generateNameComponentFile(pluginRoot); @@ -922,9 +922,9 @@ public class InstallPluginAction implements Closeable { */ private PluginDescriptor installPlugin(InstallablePlugin descriptor, Path tmpRoot, List<Path> deleteOnFailure) throws Exception { final PluginDescriptor info = loadPluginInfo(tmpRoot); - PluginPolicyInfo pluginPolicy = PolicyUtil.getPluginPolicyInfo(tmpRoot, env.tmpFile()); + PluginPolicyInfo pluginPolicy = PolicyUtil.getPluginPolicyInfo(tmpRoot, env.tmpDir()); if (pluginPolicy != null) { - Set<String> permissions = PluginSecurity.getPermissionDescriptions(pluginPolicy, env.tmpFile()); + Set<String> permissions = PluginSecurity.getPermissionDescriptions(pluginPolicy, env.tmpDir()); PluginSecurity.confirmPolicyExceptions(terminal, permissions, batch); } @@ -938,14 +938,14 @@ public class InstallPluginAction implements Closeable { ); } - final Path destination = env.pluginsFile().resolve(info.getName()); + final Path destination = env.pluginsDir().resolve(info.getName()); deleteOnFailure.add(destination); installPluginSupportFiles( info, tmpRoot, - env.binFile().resolve(info.getName()), - env.configFile().resolve(info.getName()), + env.binDir().resolve(info.getName()), + env.configDir().resolve(info.getName()), deleteOnFailure ); movePlugin(tmpRoot,
destination); diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/ListPluginsCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/ListPluginsCommand.java index fc578c81b24c..f51a478fe213 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/ListPluginsCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/ListPluginsCommand.java @@ -40,13 +40,13 @@ class ListPluginsCommand extends EnvironmentAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { - if (Files.exists(env.pluginsFile()) == false) { - throw new IOException("Plugins directory missing: " + env.pluginsFile()); + if (Files.exists(env.pluginsDir()) == false) { + throw new IOException("Plugins directory missing: " + env.pluginsDir()); } - terminal.println(Terminal.Verbosity.VERBOSE, "Plugins directory: " + env.pluginsFile()); + terminal.println(Terminal.Verbosity.VERBOSE, "Plugins directory: " + env.pluginsDir()); final List<Path> plugins = new ArrayList<>(); - try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsFile())) { + try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsDir())) { for (Path path : paths) { if (path.getFileName().toString().equals(ELASTICSEARCH_PLUGINS_YML_CACHE) == false) { plugins.add(path); @@ -61,7 +61,7 @@ class ListPluginsCommand extends EnvironmentAwareCommand { private static void printPlugin(Environment env, Terminal terminal, Path plugin, String prefix) throws IOException { terminal.println(Terminal.Verbosity.SILENT, prefix + plugin.getFileName().toString()); - PluginDescriptor info = PluginDescriptor.readFromProperties(env.pluginsFile().resolve(plugin)); + PluginDescriptor info = PluginDescriptor.readFromProperties(env.pluginsDir().resolve(plugin)); terminal.println(Terminal.Verbosity.VERBOSE, info.toString(prefix)); // When PluginDescriptor#getElasticsearchVersion returns a string, we can revisit the need diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/RemovePluginAction.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/RemovePluginAction.java index a8f9e746a24e..ac9c2b21788c 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/RemovePluginAction.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/RemovePluginAction.java @@ -93,7 +93,7 @@ public class RemovePluginAction { // We build a new map where the keys are plugins that extend plugins // we want to remove and the values are the plugins we can't remove // because of this dependency - Map<String, List<String>> pluginDependencyMap = PluginsUtils.getDependencyMapView(env.pluginsFile()); + Map<String, List<String>> pluginDependencyMap = PluginsUtils.getDependencyMapView(env.pluginsDir()); for (Map.Entry<String, List<String>> entry : pluginDependencyMap.entrySet()) { for (String extendedPlugin : entry.getValue()) { for (InstallablePlugin plugin : plugins) { @@ -121,9 +121,9 @@ public class RemovePluginAction { private void checkCanRemove(InstallablePlugin plugin) throws UserException { String pluginId = plugin.getId(); - final Path pluginDir = env.pluginsFile().resolve(pluginId); - final Path pluginConfigDir = env.configFile().resolve(pluginId); - final Path removing = env.pluginsFile().resolve(".removing-" + pluginId); + final Path pluginDir = env.pluginsDir().resolve(pluginId); + final Path pluginConfigDir =
env.configDir().resolve(pluginId); + final Path removing = env.pluginsDir().resolve(".removing-" + pluginId); /* * If the plugin does not exist and the plugin config does not exist, fail to the user that the plugin is not found, unless there's @@ -147,7 +147,7 @@ public class RemovePluginAction { } } - final Path pluginBinDir = env.binFile().resolve(pluginId); + final Path pluginBinDir = env.binDir().resolve(pluginId); if (Files.exists(pluginBinDir)) { if (Files.isDirectory(pluginBinDir) == false) { throw new UserException(ExitCodes.IO_ERROR, "bin dir for " + pluginId + " is not a directory"); @@ -157,9 +157,9 @@ public class RemovePluginAction { private void removePlugin(InstallablePlugin plugin) throws IOException { final String pluginId = plugin.getId(); - final Path pluginDir = env.pluginsFile().resolve(pluginId); - final Path pluginConfigDir = env.configFile().resolve(pluginId); - final Path removing = env.pluginsFile().resolve(".removing-" + pluginId); + final Path pluginDir = env.pluginsDir().resolve(pluginId); + final Path pluginConfigDir = env.configDir().resolve(pluginId); + final Path removing = env.pluginsDir().resolve(".removing-" + pluginId); terminal.println("-> removing [" + pluginId + "]..."); @@ -176,7 +176,7 @@ public class RemovePluginAction { terminal.println(VERBOSE, "removing [" + pluginDir + "]"); } - final Path pluginBinDir = env.binFile().resolve(pluginId); + final Path pluginBinDir = env.binDir().resolve(pluginId); if (Files.exists(pluginBinDir)) { try (Stream<Path> paths = Files.list(pluginBinDir)) { pluginPaths.addAll(paths.toList()); diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java index d6d061942277..6d77437bd71d 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java @@ -61,7 +61,7 @@ public class SyncPluginsAction { * @throws UserException if a plugins config file is found. */ public static void ensureNoConfigFile(Environment env) throws UserException { - final Path pluginsConfig = env.configFile().resolve(ELASTICSEARCH_PLUGINS_YML); + final Path pluginsConfig = env.configDir().resolve(ELASTICSEARCH_PLUGINS_YML); if (Files.exists(pluginsConfig)) { throw new UserException( ExitCodes.USAGE, @@ -79,16 +79,16 @@ public class SyncPluginsAction { * @throws Exception if anything goes wrong */ public void execute() throws Exception { - final Path configPath = this.env.configFile().resolve(ELASTICSEARCH_PLUGINS_YML); - final Path previousConfigPath = this.env.pluginsFile().resolve(ELASTICSEARCH_PLUGINS_YML_CACHE); + final Path configPath = this.env.configDir().resolve(ELASTICSEARCH_PLUGINS_YML); + final Path previousConfigPath = this.env.pluginsDir().resolve(ELASTICSEARCH_PLUGINS_YML_CACHE); if (Files.exists(configPath) == false) { // The `PluginsManager` will have checked that this file exists before invoking the action.
throw new PluginSyncException("Plugins config does not exist: " + configPath.toAbsolutePath()); } - if (Files.exists(env.pluginsFile()) == false) { - throw new PluginSyncException("Plugins directory missing: " + env.pluginsFile()); + if (Files.exists(env.pluginsDir()) == false) { + throw new PluginSyncException("Plugins directory missing: " + env.pluginsDir()); } // Parse descriptor file @@ -267,14 +267,14 @@ public class SyncPluginsAction { final List<PluginDescriptor> plugins = new ArrayList<>(); try { - try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsFile())) { + try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsDir())) { for (Path pluginPath : paths) { String filename = pluginPath.getFileName().toString(); if (filename.startsWith(".")) { continue; } - PluginDescriptor info = PluginDescriptor.readFromProperties(env.pluginsFile().resolve(pluginPath)); + PluginDescriptor info = PluginDescriptor.readFromProperties(env.pluginsDir().resolve(pluginPath)); plugins.add(info); // Check for a version mismatch, unless it's an official plugin since we can upgrade them. diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsCliProvider.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsCliProvider.java index 88b24ab9ae61..a5dacebec69b 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsCliProvider.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsCliProvider.java @@ -37,7 +37,7 @@ public class SyncPluginsCliProvider implements CliToolProvider { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { var action = new SyncPluginsAction(terminal, env); - if (Files.exists(env.configFile().resolve(ELASTICSEARCH_PLUGINS_YML)) == false) { + if (Files.exists(env.configDir().resolve(ELASTICSEARCH_PLUGINS_YML)) == false) { return; } if (Build.current().type() != Build.Type.DOCKER) { diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java index d638534943ec..d2c8d4adb4d1 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java @@ -354,7 +354,7 @@ public class InstallPluginActionTests extends ESTestCase { } void assertPlugin(String name, Path original, Environment environment) throws IOException { - assertPluginInternal(name, environment.pluginsFile(), original); + assertPluginInternal(name, environment.pluginsDir(), original); assertConfigAndBin(name, original, environment); assertInstallCleaned(environment); } @@ -395,7 +395,7 @@ public class InstallPluginActionTests extends ESTestCase { void assertConfigAndBin(String name, Path original, Environment environment) throws IOException { if (Files.exists(original.resolve("bin"))) { - Path binDir = environment.binFile().resolve(name); + Path binDir = environment.binDir().resolve(name); assertTrue("bin dir exists", Files.exists(binDir)); assertTrue("bin is a dir", Files.isDirectory(binDir)); try (DirectoryStream<Path> stream = Files.newDirectoryStream(binDir)) { @@ -409,7 +409,7 @@ public class InstallPluginActionTests extends ESTestCase { } } if
(Files.exists(original.resolve("config"))) { - Path configDir = environment.configFile().resolve(name); + Path configDir = environment.configDir().resolve(name); assertTrue("config dir exists", Files.exists(configDir)); assertTrue("config is a dir", Files.isDirectory(configDir)); @@ -417,7 +417,7 @@ public class InstallPluginActionTests extends ESTestCase { GroupPrincipal group = null; if (isPosix) { - PosixFileAttributes configAttributes = Files.getFileAttributeView(environment.configFile(), PosixFileAttributeView.class) + PosixFileAttributes configAttributes = Files.getFileAttributeView(environment.configDir(), PosixFileAttributeView.class) .readAttributes(); user = configAttributes.owner(); group = configAttributes.group(); @@ -446,7 +446,7 @@ public class InstallPluginActionTests extends ESTestCase { } void assertInstallCleaned(Environment environment) throws IOException { - try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) { + try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsDir())) { for (Path file : stream) { if (file.getFileName().toString().startsWith(".installing")) { fail("Installation dir still exists, " + file); @@ -549,7 +549,7 @@ public class InstallPluginActionTests extends ESTestCase { () -> installPlugins(List.of(pluginZip, nonexistentPluginZip), env.v1()) ); assertThat(e.getMessage(), containsString("does-not-exist")); - final Path fakeInstallPath = env.v2().pluginsFile().resolve("fake"); + final Path fakeInstallPath = env.v2().pluginsDir().resolve("fake"); // fake should have been removed when the file not found exception occurred assertFalse(Files.exists(fakeInstallPath)); assertInstallCleaned(env.v2()); @@ -557,7 +557,7 @@ public class InstallPluginActionTests extends ESTestCase { public void testInstallFailsIfPreviouslyRemovedPluginFailed() throws Exception { InstallablePlugin pluginZip = createPluginZip("fake", pluginDir); - final Path removing = env.v2().pluginsFile().resolve(".removing-failed"); + final Path removing = env.v2().pluginsDir().resolve(".removing-failed"); Files.createDirectory(removing); final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip)); final String expected = Strings.format( @@ -603,11 +603,11 @@ public class InstallPluginActionTests extends ESTestCase { public void testPluginsDirReadOnly() throws Exception { assumeTrue("posix and filesystem", isPosix && isReal); - try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsFile())) { + try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsDir())) { pluginsAttrs.setPermissions(new HashSet<>()); InstallablePlugin pluginZip = createPluginZip("fake", pluginDir); IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip)); - assertThat(e.getMessage(), containsString(env.v2().pluginsFile().toString())); + assertThat(e.getMessage(), containsString(env.v2().pluginsDir().toString())); } assertInstallCleaned(env.v2()); } @@ -694,7 +694,7 @@ public class InstallPluginActionTests extends ESTestCase { Files.createFile(binDir.resolve("somescript")); InstallablePlugin pluginZip = createPluginZip("elasticsearch", pluginDir); FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip)); - assertThat(e.getMessage(), containsString(env.v2().binFile().resolve("elasticsearch").toString())); + assertThat(e.getMessage(),
containsString(env.v2().binDir().resolve("elasticsearch").toString())); assertInstallCleaned(env.v2()); } @@ -704,7 +704,7 @@ public class InstallPluginActionTests extends ESTestCase { Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); InstallablePlugin pluginZip = createPluginZip("fake", pluginDir); - try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binFile())) { + try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binDir())) { Set<PosixFilePermission> perms = binAttrs.getCopyPermissions(); // make sure at least one execute perm is missing, so we know we forced it during installation perms.remove(PosixFilePermission.GROUP_EXECUTE); @@ -734,7 +734,7 @@ public class InstallPluginActionTests extends ESTestCase { installPlugin(pluginZip); assertPlugin("fake", tempPluginDir, env.v2()); - final Path fake = env.v2().pluginsFile().resolve("fake"); + final Path fake = env.v2().pluginsDir().resolve("fake"); final Path resources = fake.resolve("resources"); final Path platform = fake.resolve("platform"); final Path platformName = platform.resolve("linux-x86_64"); @@ -784,7 +784,7 @@ public class InstallPluginActionTests extends ESTestCase { } public void testExistingConfig() throws Exception { - Path envConfigDir = env.v2().configFile().resolve("fake"); + Path envConfigDir = env.v2().configDir().resolve("fake"); Files.createDirectories(envConfigDir); Files.write(envConfigDir.resolve("custom.yml"), "existing config".getBytes(StandardCharsets.UTF_8)); Path configDir = pluginDir.resolve("config"); @@ -921,7 +921,7 @@ public class InstallPluginActionTests extends ESTestCase { e.getMessage(), equalTo( "plugin directory [" - + env.v2().pluginsFile().resolve("fake") + + env.v2().pluginsDir().resolve("fake") + "] already exists; " + "if you need to update the plugin, uninstall it first using command 'remove fake'" ) @@ -1499,7 +1499,7 @@ public class InstallPluginActionTests extends ESTestCase { assertThat(e.getMessage(), containsString("installation aborted by user")); assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning)); - try (Stream<Path> fileStream = Files.list(pathEnvironmentTuple.v2().pluginsFile())) { + try (Stream<Path> fileStream = Files.list(pathEnvironmentTuple.v2().pluginsDir())) { assertThat(fileStream.collect(Collectors.toList()), empty()); } @@ -1512,7 +1512,7 @@ public class InstallPluginActionTests extends ESTestCase { e = expectThrows(UserException.class, () -> installPlugin(pluginZip)); assertThat(e.getMessage(), containsString("installation aborted by user")); assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning)); - try (Stream<Path> fileStream = Files.list(pathEnvironmentTuple.v2().pluginsFile())) { + try (Stream<Path> fileStream = Files.list(pathEnvironmentTuple.v2().pluginsDir())) { assertThat(fileStream.collect(Collectors.toList()), empty()); } } @@ -1566,7 +1566,7 @@ public class InstallPluginActionTests extends ESTestCase { InstallablePlugin stablePluginZip = createStablePlugin("stable1", pluginDir, true); installPlugins(List.of(stablePluginZip), env.v1()); assertPlugin("stable1", pluginDir, env.v2()); - assertNamedComponentFile("stable1", env.v2().pluginsFile(), namedComponentsJSON()); + assertNamedComponentFile("stable1", env.v2().pluginsDir(), namedComponentsJSON()); } @SuppressWarnings("unchecked") @@ -1577,7 +1577,7 @@ public class InstallPluginActionTests extends ESTestCase { installPlugins(List.of(stablePluginZip), env.v1()); assertPlugin("stable1", pluginDir, env.v2()); -
assertNamedComponentFile("stable1", env.v2().pluginsFile(), namedComponentsJSON()); + assertNamedComponentFile("stable1", env.v2().pluginsDir(), namedComponentsJSON()); } public void testGetSemanticVersion() { diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ListPluginsCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ListPluginsCommandTests.java index 0064b8c4bc51..5249aeefc2f2 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ListPluginsCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ListPluginsCommandTests.java @@ -65,7 +65,7 @@ public class ListPluginsCommandTests extends CommandTestCase { final boolean hasNativeController ) throws IOException { PluginTestUtil.writePluginProperties( - env.pluginsFile().resolve(name), + env.pluginsDir().resolve(name), "description", description, "name", @@ -84,9 +84,9 @@ public class ListPluginsCommandTests extends CommandTestCase { } public void testPluginsDirMissing() throws Exception { - Files.delete(env.pluginsFile()); + Files.delete(env.pluginsDir()); IOException e = expectThrows(IOException.class, () -> execute()); - assertEquals("Plugins directory missing: " + env.pluginsFile(), e.getMessage()); + assertEquals("Plugins directory missing: " + env.pluginsDir(), e.getMessage()); } public void testNoPlugins() throws Exception { @@ -112,7 +112,7 @@ public class ListPluginsCommandTests extends CommandTestCase { execute("-v"); assertEquals( buildMultiline( - "Plugins directory: " + env.pluginsFile(), + "Plugins directory: " + env.pluginsDir(), "fake_plugin", "- Plugin information:", "Name: fake_plugin", @@ -134,7 +134,7 @@ public class ListPluginsCommandTests extends CommandTestCase { execute("-v"); assertEquals( buildMultiline( - "Plugins directory: " + env.pluginsFile(), + "Plugins directory: " + env.pluginsDir(), "fake_plugin1", "- Plugin information:", "Name: fake_plugin1", @@ -157,7 +157,7 @@ public class ListPluginsCommandTests extends CommandTestCase { execute("-v"); assertEquals( buildMultiline( - "Plugins directory: " + env.pluginsFile(), + "Plugins directory: " + env.pluginsDir(), "fake_plugin1", "- Plugin information:", "Name: fake_plugin1", @@ -193,14 +193,14 @@ public class ListPluginsCommandTests extends CommandTestCase { } public void testPluginWithoutDescriptorFile() throws Exception { - final Path pluginDir = env.pluginsFile().resolve("fake1"); + final Path pluginDir = env.pluginsDir().resolve("fake1"); Files.createDirectories(pluginDir); var e = expectThrows(IllegalStateException.class, () -> execute()); assertThat(e.getMessage(), equalTo("Plugin [fake1] is missing a descriptor properties file.")); } public void testPluginWithWrongDescriptorFile() throws Exception { - final Path pluginDir = env.pluginsFile().resolve("fake1"); + final Path pluginDir = env.pluginsDir().resolve("fake1"); PluginTestUtil.writePluginProperties(pluginDir, "description", "fake desc"); var e = expectThrows(IllegalArgumentException.class, () -> execute()); assertThat(e.getMessage(), startsWith("property [name] is missing for plugin")); @@ -208,7 +208,7 @@ public class ListPluginsCommandTests extends CommandTestCase { public void testExistingIncompatiblePlugin() throws Exception { PluginTestUtil.writePluginProperties( - env.pluginsFile().resolve("fake_plugin1"), + env.pluginsDir().resolve("fake_plugin1"), "description", "fake desc 1", "name", diff --git 
a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/RemovePluginActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/RemovePluginActionTests.java index aabdd4aaceb9..8338c395e5e4 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/RemovePluginActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/RemovePluginActionTests.java @@ -58,11 +58,11 @@ public class RemovePluginActionTests extends ESTestCase { } void createPlugin(String name) throws IOException { - createPlugin(env.pluginsFile(), name, Version.CURRENT); + createPlugin(env.pluginsDir(), name, Version.CURRENT); } void createPlugin(String name, Version version) throws IOException { - createPlugin(env.pluginsFile(), name, version); + createPlugin(env.pluginsDir(), name, version); } void createPlugin(Path path, String name, Version version) throws IOException { @@ -98,7 +98,7 @@ public class RemovePluginActionTests extends ESTestCase { } static void assertRemoveCleaned(Environment env) throws IOException { - try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsFile())) { + try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsDir())) { for (Path file : stream) { if (file.getFileName().toString().startsWith(".removing")) { fail("Removal dir still exists, " + file); @@ -115,84 +115,84 @@ public class RemovePluginActionTests extends ESTestCase { public void testBasic() throws Exception { createPlugin("fake"); - Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); - Files.createDirectory(env.pluginsFile().resolve("fake").resolve("subdir")); + Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar")); + Files.createDirectory(env.pluginsDir().resolve("fake").resolve("subdir")); createPlugin("other"); removePlugin("fake", home, randomBoolean()); - assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); - assertTrue(Files.exists(env.pluginsFile().resolve("other"))); + assertFalse(Files.exists(env.pluginsDir().resolve("fake"))); + assertTrue(Files.exists(env.pluginsDir().resolve("other"))); assertRemoveCleaned(env); } /** Check that multiple plugins can be removed at the same time.
*/ public void testRemoveMultiple() throws Exception { createPlugin("fake"); - Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); - Files.createDirectory(env.pluginsFile().resolve("fake").resolve("subdir")); + Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar")); + Files.createDirectory(env.pluginsDir().resolve("fake").resolve("subdir")); createPlugin("other"); - Files.createFile(env.pluginsFile().resolve("other").resolve("plugin.jar")); - Files.createDirectory(env.pluginsFile().resolve("other").resolve("subdir")); + Files.createFile(env.pluginsDir().resolve("other").resolve("plugin.jar")); + Files.createDirectory(env.pluginsDir().resolve("other").resolve("subdir")); removePlugin("fake", home, randomBoolean()); removePlugin("other", home, randomBoolean()); - assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); - assertFalse(Files.exists(env.pluginsFile().resolve("other"))); + assertFalse(Files.exists(env.pluginsDir().resolve("fake"))); + assertFalse(Files.exists(env.pluginsDir().resolve("other"))); assertRemoveCleaned(env); } public void testBin() throws Exception { createPlugin("fake"); - Path binDir = env.binFile().resolve("fake"); + Path binDir = env.binDir().resolve("fake"); Files.createDirectories(binDir); Files.createFile(binDir.resolve("somescript")); removePlugin("fake", home, randomBoolean()); - assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); - assertTrue(Files.exists(env.binFile().resolve("elasticsearch"))); + assertFalse(Files.exists(env.pluginsDir().resolve("fake"))); + assertTrue(Files.exists(env.binDir().resolve("elasticsearch"))); assertFalse(Files.exists(binDir)); assertRemoveCleaned(env); } public void testBinNotDir() throws Exception { createPlugin("fake"); - Files.createFile(env.binFile().resolve("fake")); + Files.createFile(env.binDir().resolve("fake")); UserException e = expectThrows(UserException.class, () -> removePlugin("fake", home, randomBoolean())); assertThat(e.getMessage(), containsString("not a directory")); - assertTrue(Files.exists(env.pluginsFile().resolve("fake"))); // did not remove - assertTrue(Files.exists(env.binFile().resolve("fake"))); + assertTrue(Files.exists(env.pluginsDir().resolve("fake"))); // did not remove + assertTrue(Files.exists(env.binDir().resolve("fake"))); assertRemoveCleaned(env); } public void testConfigDirPreserved() throws Exception { createPlugin("fake"); - final Path configDir = env.configFile().resolve("fake"); + final Path configDir = env.configDir().resolve("fake"); Files.createDirectories(configDir); Files.createFile(configDir.resolve("fake.yml")); final MockTerminal terminal = removePlugin("fake", home, false); - assertTrue(Files.exists(env.configFile().resolve("fake"))); + assertTrue(Files.exists(env.configDir().resolve("fake"))); assertThat(terminal.getOutput(), containsString(expectedConfigDirPreservedMessage(configDir))); assertRemoveCleaned(env); } public void testPurgePluginExists() throws Exception { createPlugin("fake"); - final Path configDir = env.configFile().resolve("fake"); + final Path configDir = env.configDir().resolve("fake"); if (randomBoolean()) { Files.createDirectories(configDir); Files.createFile(configDir.resolve("fake.yml")); } final MockTerminal terminal = removePlugin("fake", home, true); - assertFalse(Files.exists(env.configFile().resolve("fake"))); + assertFalse(Files.exists(env.configDir().resolve("fake"))); assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir)))); 
assertRemoveCleaned(env); } public void testPurgePluginDoesNotExist() throws Exception { - final Path configDir = env.configFile().resolve("fake"); + final Path configDir = env.configDir().resolve("fake"); Files.createDirectories(configDir); Files.createFile(configDir.resolve("fake.yml")); final MockTerminal terminal = removePlugin("fake", home, true); - assertFalse(Files.exists(env.configFile().resolve("fake"))); + assertFalse(Files.exists(env.configDir().resolve("fake"))); assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir)))); assertRemoveCleaned(env); } @@ -203,8 +203,8 @@ public class RemovePluginActionTests extends ESTestCase { } public void testPurgeOnlyMarkerFileExists() throws Exception { - final Path configDir = env.configFile().resolve("fake"); - final Path removing = env.pluginsFile().resolve(".removing-fake"); + final Path configDir = env.configDir().resolve("fake"); + final Path removing = env.pluginsDir().resolve(".removing-fake"); Files.createFile(removing); final MockTerminal terminal = removePlugin("fake", home, randomBoolean()); assertFalse(Files.exists(removing)); @@ -213,7 +213,7 @@ public class RemovePluginActionTests extends ESTestCase { public void testNoConfigDirPreserved() throws Exception { createPlugin("fake"); - final Path configDir = env.configFile().resolve("fake"); + final Path configDir = env.configDir().resolve("fake"); final MockTerminal terminal = removePlugin("fake", home, randomBoolean()); assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir)))); } @@ -250,8 +250,8 @@ public class RemovePluginActionTests extends ESTestCase { public void testRemoveWhenRemovingMarker() throws Exception { createPlugin("fake"); - Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); - Files.createFile(env.pluginsFile().resolve(".removing-fake")); + Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar")); + Files.createFile(env.pluginsDir().resolve(".removing-fake")); removePlugin("fake", home, randomBoolean()); } @@ -262,10 +262,10 @@ public class RemovePluginActionTests extends ESTestCase { public void testRemoveMigratedPluginsWhenInstalled() throws Exception { for (String id : List.of("repository-azure", "repository-gcs", "repository-s3")) { createPlugin(id); - Files.createFile(env.pluginsFile().resolve(id).resolve("plugin.jar")); + Files.createFile(env.pluginsDir().resolve(id).resolve("plugin.jar")); final MockTerminal terminal = removePlugin(id, home, randomBoolean()); - assertThat(Files.exists(env.pluginsFile().resolve(id)), is(false)); + assertThat(Files.exists(env.pluginsDir().resolve(id)), is(false)); // This message shouldn't be printed if plugin was actually installed. 
assertThat(terminal.getErrorOutput(), not(containsString("plugin [" + id + "] is no longer a plugin"))); } @@ -288,11 +288,11 @@ public class RemovePluginActionTests extends ESTestCase { */ public void testRemoveRegularInstalledPluginAndMigratedUninstalledPlugin() throws Exception { createPlugin("fake"); - Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); + Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar")); final MockTerminal terminal = removePlugin(List.of("fake", "repository-s3"), home, randomBoolean()); - assertThat(Files.exists(env.pluginsFile().resolve("fake")), is(false)); + assertThat(Files.exists(env.pluginsDir().resolve("fake")), is(false)); assertThat(terminal.getErrorOutput(), containsString("plugin [repository-s3] is no longer a plugin")); } diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java index 2d2336428a0a..1a0973616095 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java @@ -55,10 +55,10 @@ public class SyncPluginsActionTests extends ESTestCase { Path home = createTempDir(); Settings settings = Settings.builder().put("path.home", home).build(); env = TestEnvironment.newEnvironment(settings); - Files.createDirectories(env.binFile()); - Files.createFile(env.binFile().resolve("elasticsearch")); - Files.createDirectories(env.configFile()); - Files.createDirectories(env.pluginsFile()); + Files.createDirectories(env.binDir()); + Files.createFile(env.binDir().resolve("elasticsearch")); + Files.createDirectories(env.configDir()); + Files.createDirectories(env.pluginsDir()); terminal = MockTerminal.create(); action = new SyncPluginsAction(terminal, env); @@ -78,7 +78,7 @@ public class SyncPluginsActionTests extends ESTestCase { * then an exception is thrown. 
*/ public void test_ensureNoConfigFile_withConfig_throwsException() throws Exception { - Files.createFile(env.configFile().resolve("elasticsearch-plugins.yml")); + Files.createFile(env.configDir().resolve("elasticsearch-plugins.yml")); final UserException e = expectThrows(UserException.class, () -> SyncPluginsAction.ensureNoConfigFile(env)); assertThat(e.getMessage(), Matchers.matchesPattern("^Plugins config \\[.*] exists.*$")); @@ -354,7 +354,7 @@ public class SyncPluginsActionTests extends ESTestCase { private void createPlugin(String name, String version) throws IOException { PluginTestUtil.writePluginProperties( - env.pluginsFile().resolve(name), + env.pluginsDir().resolve(name), "description", "dummy", "name", diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeyStoreLoader.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeyStoreLoader.java index 9430cb598cf0..2ae58040437a 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeyStoreLoader.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeyStoreLoader.java @@ -24,7 +24,7 @@ public class KeyStoreLoader implements SecureSettingsLoader { @Override public LoadedSecrets load(Environment environment, Terminal terminal) throws Exception { // See if we have a keystore already present - KeyStoreWrapper secureSettings = KeyStoreWrapper.load(environment.configFile()); + KeyStoreWrapper secureSettings = KeyStoreWrapper.load(environment.configDir()); // If there's no keystore or the keystore has no password, set an empty password var password = (secureSettings == null || secureSettings.hasPassword() == false) ? new SecureString(new char[0]) @@ -35,7 +35,7 @@ public class KeyStoreLoader implements SecureSettingsLoader { @Override public SecureSettings bootstrap(Environment environment, SecureString password) throws Exception { - return KeyStoreWrapper.bootstrap(environment.configFile(), () -> password); + return KeyStoreWrapper.bootstrap(environment.configDir(), () -> password); } @Override diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java index 22b62972befe..be454350133e 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java @@ -150,7 +150,7 @@ class ServerCli extends EnvironmentAwareCommand { throw new UserException(ExitCodes.USAGE, "Multiple --enrollment-token parameters are not allowed"); } - Path log4jConfig = env.configFile().resolve("log4j2.properties"); + Path log4jConfig = env.configDir().resolve("log4j2.properties"); if (Files.exists(log4jConfig) == false) { throw new UserException(ExitCodes.CONFIG, "Missing logging config file at " + log4jConfig); } @@ -239,7 +239,7 @@ class ServerCli extends EnvironmentAwareCommand { } validatePidFile(pidFile); } - return new ServerArgs(daemonize, quiet, pidFile, secrets, env.settings(), env.configFile(), env.logsFile()); + return new ServerArgs(daemonize, quiet, pidFile, secrets, env.settings(), env.configDir(), env.logsDir()); } @Override diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java index 
66ee712fcce9..2854d76c110d 100644 --- a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java +++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java @@ -43,8 +43,8 @@ class WindowsServiceDaemon extends EnvironmentAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { // the Windows service daemon doesn't support secure settings implementations other than the keystore - try (var loadedSecrets = KeyStoreWrapper.bootstrap(env.configFile(), () -> new SecureString(new char[0]))) { - var args = new ServerArgs(false, true, null, loadedSecrets, env.settings(), env.configFile(), env.logsFile()); + try (var loadedSecrets = KeyStoreWrapper.bootstrap(env.configDir(), () -> new SecureString(new char[0]))) { + var args = new ServerArgs(false, true, null, loadedSecrets, env.settings(), env.configDir(), env.logsDir()); var tempDir = ServerProcessUtils.setupTempDir(processInfo); var jvmOptions = JvmOptionsParser.determineJvmOptions(args, processInfo, tempDir, new MachineDependentHeap()); var serverProcessBuilder = new ServerProcessBuilder().withTerminal(terminal) diff --git a/docs/changelog/119546.yaml b/docs/changelog/119546.yaml new file mode 100644 index 000000000000..017bbb845c0a --- /dev/null +++ b/docs/changelog/119546.yaml @@ -0,0 +1,5 @@ +pr: 119546 +summary: Introduce `FallbackSyntheticSourceBlockLoader` and apply it to keyword fields +area: Mapping +type: enhancement +issues: [] diff --git a/docs/changelog/121396.yaml b/docs/changelog/121396.yaml new file mode 100644 index 000000000000..1d77a8fbb007 --- /dev/null +++ b/docs/changelog/121396.yaml @@ -0,0 +1,5 @@ +pr: 121396 +summary: Change format for Unified Chat +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/121552.yaml b/docs/changelog/121552.yaml new file mode 100644 index 000000000000..c12e7615d124 --- /dev/null +++ b/docs/changelog/121552.yaml @@ -0,0 +1,5 @@ +pr: 121552 +summary: Fix a bug in TOP +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/121559.yaml b/docs/changelog/121559.yaml new file mode 100644 index 000000000000..e3870609a454 --- /dev/null +++ b/docs/changelog/121559.yaml @@ -0,0 +1,6 @@ +pr: 121559 +summary: Skip Usage stats update when ML is disabled +area: Machine Learning +type: bug +issues: + - 121532 diff --git a/docs/changelog/121568.yaml b/docs/changelog/121568.yaml new file mode 100644 index 000000000000..80d769967dc2 --- /dev/null +++ b/docs/changelog/121568.yaml @@ -0,0 +1,6 @@ +pr: 121568 +summary: Analyze API to return 400 for wrong custom analyzer +area: Analysis +type: bug +issues: + - 121443 diff --git a/docs/changelog/121715.yaml b/docs/changelog/121715.yaml new file mode 100644 index 000000000000..837bae57b4c9 --- /dev/null +++ b/docs/changelog/121715.yaml @@ -0,0 +1,5 @@ +pr: 121715 +summary: Fix synthetic source issue with deeply nested ignored source fields +area: Mapping +type: bug +issues: [] diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java index 953d02bccf1e..a4b9767c4c64 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java +++ 
b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java @@ -20,6 +20,7 @@ public @interface EntitlementTest { enum ExpectedAccess { PLUGINS, ES_MODULES_ONLY, + SERVER_ONLY, ALWAYS_DENIED } diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java index dfca49d12267..f2f6bd721e88 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java @@ -13,18 +13,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyBreakIteratorProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyCalendarDataProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyCalendarNameProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyCollatorProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyCurrencyNameProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyDateFormatProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyDateFormatSymbolsProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyDecimalFormatSymbolsProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyLocaleNameProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyLocaleServiceProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyNumberFormatProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyTimeZoneNameProvider; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.rest.BaseRestHandler; @@ -59,6 +47,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -75,7 +64,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; @SuppressWarnings("unused") public class RestEntitlementsCheckAction extends BaseRestHandler { private static final Logger logger = LogManager.getLogger(RestEntitlementsCheckAction.class); - public static final Thread NO_OP_SHUTDOWN_HOOK = new Thread(() -> {}, "Shutdown hook for testing"); record CheckAction(CheckedRunnable<Exception> action, boolean isAlwaysDeniedToPlugins, Integer fromJavaVersion) { /** @@ -94,11 +82,8 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { } } - private static final Map<String, CheckAction> checkActions = Stream.concat( + private static final Map<String, CheckAction> checkActions = Stream.of( Stream.<Entry<String, CheckAction>>of( - entry("runtime_exit", deniedToPlugins(RestEntitlementsCheckAction::runtimeExit)), - entry("runtime_halt", deniedToPlugins(RestEntitlementsCheckAction::runtimeHalt)), - entry("system_exit", deniedToPlugins(RestEntitlementsCheckAction::systemExit)), entry("create_classloader",
forPlugins(RestEntitlementsCheckAction::createClassLoader)), entry("processBuilder_start", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_start)), entry("processBuilder_startPipeline", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_startPipeline)), @@ -106,27 +91,10 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { entry("set_default_ssl_socket_factory", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLSocketFactory)), entry("set_default_hostname_verifier", alwaysDenied(RestEntitlementsCheckAction::setDefaultHostnameVerifier)), entry("set_default_ssl_context", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLContext)), - entry("system_setIn", alwaysDenied(RestEntitlementsCheckAction::system$$setIn)), - entry("system_setOut", alwaysDenied(RestEntitlementsCheckAction::system$$setOut)), - entry("system_setErr", alwaysDenied(RestEntitlementsCheckAction::system$$setErr)), - entry("runtime_addShutdownHook", alwaysDenied(RestEntitlementsCheckAction::runtime$addShutdownHook)), - entry("runtime_removeShutdownHook", alwaysDenied(RestEntitlementsCheckAction::runtime$$removeShutdownHook)), entry( "thread_setDefaultUncaughtExceptionHandler", alwaysDenied(RestEntitlementsCheckAction::thread$$setDefaultUncaughtExceptionHandler) ), - entry("localeServiceProvider", alwaysDenied(RestEntitlementsCheckAction::localeServiceProvider$)), - entry("breakIteratorProvider", alwaysDenied(RestEntitlementsCheckAction::breakIteratorProvider$)), - entry("collatorProvider", alwaysDenied(RestEntitlementsCheckAction::collatorProvider$)), - entry("dateFormatProvider", alwaysDenied(RestEntitlementsCheckAction::dateFormatProvider$)), - entry("dateFormatSymbolsProvider", alwaysDenied(RestEntitlementsCheckAction::dateFormatSymbolsProvider$)), - entry("decimalFormatSymbolsProvider", alwaysDenied(RestEntitlementsCheckAction::decimalFormatSymbolsProvider$)), - entry("numberFormatProvider", alwaysDenied(RestEntitlementsCheckAction::numberFormatProvider$)), - entry("calendarDataProvider", alwaysDenied(RestEntitlementsCheckAction::calendarDataProvider$)), - entry("calendarNameProvider", alwaysDenied(RestEntitlementsCheckAction::calendarNameProvider$)), - entry("currencyNameProvider", alwaysDenied(RestEntitlementsCheckAction::currencyNameProvider$)), - entry("localeNameProvider", alwaysDenied(RestEntitlementsCheckAction::localeNameProvider$)), - entry("timeZoneNameProvider", alwaysDenied(RestEntitlementsCheckAction::timeZoneNameProvider$)), entry("logManager", alwaysDenied(RestEntitlementsCheckAction::logManager$)), entry("locale_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultLocale)), @@ -230,8 +198,11 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { entry("symbol_lookup_name", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithName, false, 22)), entry("symbol_lookup_path", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithPath, false, 22)) ), - getTestEntries(FileCheckActions.class) + getTestEntries(FileCheckActions.class), + getTestEntries(SpiActions.class), + getTestEntries(SystemActions.class) ) + .flatMap(Function.identity()) .filter(entry -> entry.getValue().fromJavaVersion() == null || Runtime.version().feature() >= entry.getValue().fromJavaVersion()) .collect(Collectors.toUnmodifiableMap(Entry::getKey, Entry::getValue)); @@ -267,7 +238,7 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { } } }; - boolean deniedToPlugins = testAnnotation.expectedAccess() == PLUGINS; + boolean deniedToPlugins = 
testAnnotation.expectedAccess() != PLUGINS; Integer fromJavaVersion = testAnnotation.fromJavaVersion() == -1 ? null : testAnnotation.fromJavaVersion(); entries.add(entry(method.getName(), new CheckAction(runnable, deniedToPlugins, fromJavaVersion))); } @@ -323,21 +294,6 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { HttpsURLConnection.setDefaultSSLSocketFactory(new DummyImplementations.DummySSLSocketFactory()); } - @SuppressForbidden(reason = "Specifically testing Runtime.exit") - private static void runtimeExit() { - Runtime.getRuntime().exit(123); - } - - @SuppressForbidden(reason = "Specifically testing Runtime.halt") - private static void runtimeHalt() { - Runtime.getRuntime().halt(123); - } - - @SuppressForbidden(reason = "Specifically testing System.exit") - private static void systemExit() { - System.exit(123); - } - private static void createClassLoader() throws IOException { try (var classLoader = new URLClassLoader("test", new URL[0], RestEntitlementsCheckAction.class.getClassLoader())) { logger.info("Created URLClassLoader [{}]", classLoader.getName()); @@ -356,80 +312,10 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { new DummyImplementations.DummyHttpsURLConnection().setSSLSocketFactory(new DummyImplementations.DummySSLSocketFactory()); } - private static void system$$setIn() { - System.setIn(System.in); - } - - @SuppressForbidden(reason = "This should be a no-op so we don't interfere with system streams") - private static void system$$setOut() { - System.setOut(System.out); - } - - @SuppressForbidden(reason = "This should be a no-op so we don't interfere with system streams") - private static void system$$setErr() { - System.setErr(System.err); - } - - private static void runtime$addShutdownHook() { - Runtime.getRuntime().addShutdownHook(NO_OP_SHUTDOWN_HOOK); - } - - private static void runtime$$removeShutdownHook() { - Runtime.getRuntime().removeShutdownHook(NO_OP_SHUTDOWN_HOOK); - } - private static void thread$$setDefaultUncaughtExceptionHandler() { Thread.setDefaultUncaughtExceptionHandler(Thread.getDefaultUncaughtExceptionHandler()); } - private static void localeServiceProvider$() { - new DummyLocaleServiceProvider(); - } - - private static void breakIteratorProvider$() { - new DummyBreakIteratorProvider(); - } - - private static void collatorProvider$() { - new DummyCollatorProvider(); - } - - private static void dateFormatProvider$() { - new DummyDateFormatProvider(); - } - - private static void dateFormatSymbolsProvider$() { - new DummyDateFormatSymbolsProvider(); - } - - private static void decimalFormatSymbolsProvider$() { - new DummyDecimalFormatSymbolsProvider(); - } - - private static void numberFormatProvider$() { - new DummyNumberFormatProvider(); - } - - private static void calendarDataProvider$() { - new DummyCalendarDataProvider(); - } - - private static void calendarNameProvider$() { - new DummyCalendarNameProvider(); - } - - private static void currencyNameProvider$() { - new DummyCurrencyNameProvider(); - } - - private static void localeNameProvider$() { - new DummyLocaleNameProvider(); - } - - private static void timeZoneNameProvider$() { - new DummyTimeZoneNameProvider(); - } - private static void logManager$() { new java.util.logging.LogManager() { }; diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SpiActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SpiActions.java new file mode 100644 
index 000000000000..d9ebd1705cb4 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SpiActions.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.test; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; + +class SpiActions { + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createBreakIteratorProvider() { + new DummyImplementations.DummyBreakIteratorProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCollatorProvider() { + new DummyImplementations.DummyCollatorProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createDateFormatProvider() { + new DummyImplementations.DummyDateFormatProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createDateFormatSymbolsProvider() { + new DummyImplementations.DummyDateFormatSymbolsProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createDecimalFormatSymbolsProvider() { + new DummyImplementations.DummyDecimalFormatSymbolsProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createNumberFormatProvider() { + new DummyImplementations.DummyNumberFormatProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCalendarDataProvider() { + new DummyImplementations.DummyCalendarDataProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCalendarNameProvider() { + new DummyImplementations.DummyCalendarNameProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCurrencyNameProvider() { + new DummyImplementations.DummyCurrencyNameProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createLocaleNameProvider() { + new DummyImplementations.DummyLocaleNameProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createTimeZoneNameProvider() { + new DummyImplementations.DummyTimeZoneNameProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createLocaleServiceProvider() { + new DummyImplementations.DummyLocaleServiceProvider(); + } + + private SpiActions() {} +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SystemActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SystemActions.java new file mode 100644 index 000000000000..4df1b1dd26d6 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SystemActions.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.test; + +import org.elasticsearch.core.SuppressForbidden; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.SERVER_ONLY; + +class SystemActions { + + @SuppressForbidden(reason = "Specifically testing Runtime.exit") + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void runtimeExit() { + Runtime.getRuntime().exit(123); + } + + @SuppressForbidden(reason = "Specifically testing Runtime.halt") + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void runtimeHalt() { + Runtime.getRuntime().halt(123); + } + + @SuppressForbidden(reason = "Specifically testing System.exit") + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void systemExit() { + System.exit(123); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void systemSetIn() { + System.setIn(System.in); + } + + @SuppressForbidden(reason = "This should be a no-op so we don't interfere with system streams") + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void systemSetOut() { + System.setOut(System.out); + } + + @SuppressForbidden(reason = "This should be a no-op so we don't interfere with system streams") + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void systemSetErr() { + System.setErr(System.err); + } + + private static final Thread NO_OP_SHUTDOWN_HOOK = new Thread(() -> {}, "Shutdown hook for testing"); + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void runtimeAddShutdownHook() { + Runtime.getRuntime().addShutdownHook(NO_OP_SHUTDOWN_HOOK); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void runtimeRemoveShutdownHook() { + Runtime.getRuntime().removeShutdownHook(NO_OP_SHUTDOWN_HOOK); + } + + private SystemActions() {} +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java index b58e0d2fb87e..fef7b5d11aff 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java @@ -22,7 +22,7 @@ import java.lang.annotation.Target; * using this annotation is considered parseable as part of a policy file * for entitlements. 
*/ -@Target(ElementType.CONSTRUCTOR) +@Target({ ElementType.CONSTRUCTOR, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) public @interface ExternalEntitlement { diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java index d574609d1321..3333eefa4f71 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java @@ -17,8 +17,11 @@ import java.util.Arrays; import java.util.List; import java.util.Objects; +import static org.elasticsearch.core.PathUtils.getDefaultFileSystem; + public final class FileAccessTree { public static final FileAccessTree EMPTY = new FileAccessTree(List.of()); + private static final String FILE_SEPARATOR = getDefaultFileSystem().getSeparator(); private final String[] readPaths; private final String[] writePaths; @@ -27,11 +30,11 @@ public final class FileAccessTree { List<String> readPaths = new ArrayList<>(); List<String> writePaths = new ArrayList<>(); for (FileEntitlement fileEntitlement : fileEntitlements) { - var mode = fileEntitlement.mode(); - if (mode == FileEntitlement.Mode.READ_WRITE) { - writePaths.add(fileEntitlement.path()); + String path = normalizePath(Path.of(fileEntitlement.path())); + if (fileEntitlement.mode() == FileEntitlement.Mode.READ_WRITE) { + writePaths.add(path); } - readPaths.add(fileEntitlement.path()); + readPaths.add(path); } readPaths.sort(String::compareTo); @@ -46,14 +49,20 @@ public final class FileAccessTree { } boolean canRead(Path path) { - return checkPath(normalize(path), readPaths); + return checkPath(normalizePath(path), readPaths); } boolean canWrite(Path path) { - return checkPath(normalize(path), writePaths); + return checkPath(normalizePath(path), writePaths); } - private static String normalize(Path path) { + /** + * @return the "canonical" form of the given {@code path}, to be used for entitlement checks. + */ + static String normalizePath(Path path) { + // Note that toAbsolutePath produces paths separated by the default file separator, + // so on Windows, if the given path uses forward slashes, this consistently + // converts it to backslashes.
return path.toAbsolutePath().normalize().toString(); } @@ -64,7 +73,7 @@ public final class FileAccessTree { int ndx = Arrays.binarySearch(paths, path); if (ndx < -1) { String maybeParent = paths[-ndx - 2]; - return path.startsWith(maybeParent); + return path.startsWith(maybeParent) && path.startsWith(FILE_SEPARATOR, maybeParent.length()); } return ndx >= 0; } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java index 992728b68186..2d3468165a59 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java @@ -27,6 +27,8 @@ import java.io.InputStream; import java.io.UncheckedIOException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -147,6 +149,7 @@ public class PolicyParser { } Constructor<?> entitlementConstructor = null; + Method entitlementMethod = null; ExternalEntitlement entitlementMetadata = null; for (var ctor : entitlementClass.getConstructors()) { var metadata = ctor.getAnnotation(ExternalEntitlement.class); @@ -161,8 +164,27 @@ public class PolicyParser { entitlementConstructor = ctor; entitlementMetadata = metadata; } - } + for (var method : entitlementClass.getMethods()) { + var metadata = method.getAnnotation(ExternalEntitlement.class); + if (metadata != null) { + if (Modifier.isStatic(method.getModifiers()) == false) { + throw new IllegalStateException( + "entitlement class [" + entitlementClass.getName() + "] has non-static method annotated with ExternalEntitlement" + ); + } + if (entitlementMetadata != null) { + throw new IllegalStateException( + "entitlement class [" + + entitlementClass.getName() + + "] has more than one constructor and/or method annotated with ExternalEntitlement" + ); + } + entitlementMethod = method; + entitlementMetadata = metadata; + } + } + if (entitlementMetadata == null) { throw newPolicyParserException(scopeName, "unknown entitlement type [" + entitlementType + "]"); } @@ -171,7 +193,9 @@ public class PolicyParser { throw newPolicyParserException("entitlement type [" + entitlementType + "] is allowed only on modules"); } - Class<?>[] parameterTypes = entitlementConstructor.getParameterTypes(); + Class<?>[] parameterTypes = entitlementConstructor != null + ?
entitlementConstructor.getParameterTypes() + : entitlementMethod.getParameterTypes(); String[] parametersNames = entitlementMetadata.parameterNames(); if (parameterTypes.length != 0 || parametersNames.length != 0) { @@ -204,7 +228,11 @@ public class PolicyParser { } try { - return (Entitlement) entitlementConstructor.newInstance(parameterValues); + if (entitlementConstructor != null) { + return (Entitlement) entitlementConstructor.newInstance(parameterValues); + } else { + return (Entitlement) entitlementMethod.invoke(null, parameterValues); + } } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { if (e.getCause() instanceof PolicyValidationException piae) { throw newPolicyParserException(startLocation, scopeName, entitlementType, piae); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FileEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FileEntitlement.java index f3a0ee1758a0..01d882e4d9e2 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FileEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FileEntitlement.java @@ -12,10 +12,12 @@ package org.elasticsearch.entitlement.runtime.policy.entitlements; import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; import org.elasticsearch.entitlement.runtime.policy.PolicyValidationException; -import java.nio.file.Paths; - /** - * Describes a file entitlement with a path and mode. + * Describes entitlement to access files at a particular location. + * + * @param path the location of the files. For directories, implicitly includes access to + * all contained files and (recursively) subdirectories. 
+ * @param mode the type of operation */ public record FileEntitlement(String path, Mode mode) implements Entitlement { @@ -24,14 +26,6 @@ public record FileEntitlement(String path, Mode mode) implements Entitlement { READ_WRITE } - public FileEntitlement { - path = normalizePath(path); - } - - private static String normalizePath(String path) { - return Paths.get(path).toAbsolutePath().normalize().toString(); - } - private static Mode parseMode(String mode) { if (mode.equals("read")) { return Mode.READ; @@ -43,7 +37,7 @@ public record FileEntitlement(String path, Mode mode) implements Entitlement { } @ExternalEntitlement(parameterNames = { "path", "mode" }, esModulesOnly = false) - public FileEntitlement(String path, String mode) { - this(path, parseMode(mode)); + public static FileEntitlement create(String path, String mode) { + return new FileEntitlement(path, parseMode(mode)); } } diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java index c133cf0f1242..48c03cfd2f9b 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java @@ -16,6 +16,7 @@ import org.junit.BeforeClass; import java.nio.file.Path; import java.util.List; +import static org.elasticsearch.core.PathUtils.getDefaultFileSystem; import static org.hamcrest.Matchers.is; public class FileAccessTreeTests extends ESTestCase { @@ -41,7 +42,9 @@ public class FileAccessTreeTests extends ESTestCase { var tree = FileAccessTree.of(List.of(entitlement("foo", "read"))); assertThat(tree.canRead(path("foo")), is(true)); assertThat(tree.canRead(path("foo/subdir")), is(true)); + assertThat(tree.canRead(path("food")), is(false)); assertThat(tree.canWrite(path("foo")), is(false)); + assertThat(tree.canWrite(path("food")), is(false)); assertThat(tree.canRead(path("before")), is(false)); assertThat(tree.canRead(path("later")), is(false)); @@ -51,7 +54,9 @@ public class FileAccessTreeTests extends ESTestCase { var tree = FileAccessTree.of(List.of(entitlement("foo", "read_write"))); assertThat(tree.canWrite(path("foo")), is(true)); assertThat(tree.canWrite(path("foo/subdir")), is(true)); + assertThat(tree.canWrite(path("food")), is(false)); assertThat(tree.canRead(path("foo")), is(true)); + assertThat(tree.canRead(path("food")), is(false)); assertThat(tree.canWrite(path("before")), is(false)); assertThat(tree.canWrite(path("later")), is(false)); @@ -83,8 +88,24 @@ public class FileAccessTreeTests extends ESTestCase { assertThat(tree.canRead(path("")), is(false)); } + public void testForwardSlashes() { + String sep = getDefaultFileSystem().getSeparator(); + var tree = FileAccessTree.of(List.of(entitlement("a/b", "read"), entitlement("m" + sep + "n", "read"))); + + // Native separators work + assertThat(tree.canRead(path("a" + sep + "b")), is(true)); + assertThat(tree.canRead(path("m" + sep + "n")), is(true)); + + // Forward slashes also work + assertThat(tree.canRead(path("a/b")), is(true)); + assertThat(tree.canRead(path("m/n")), is(true)); + + // In case the native separator is a backslash, don't treat that as an escape + assertThat(tree.canRead(path("m\n")), is(false)); + } + FileEntitlement entitlement(String path, String mode) { Path p = path(path); - return new FileEntitlement(p.toString(), mode); + return 
FileEntitlement.create(p.toString(), mode); } } diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java index 53cd5ee8aae0..85bffda369f3 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java @@ -40,6 +40,35 @@ public class PolicyParserTests extends ESTestCase { public ManyConstructorsEntitlement(int i) {} } + public static class ManyMethodsEntitlement implements Entitlement { + @ExternalEntitlement + public static ManyMethodsEntitlement create(String s) { + return new ManyMethodsEntitlement(); + } + + @ExternalEntitlement + public static ManyMethodsEntitlement create(int i) { + return new ManyMethodsEntitlement(); + } + } + + public static class ConstructorAndMethodEntitlement implements Entitlement { + @ExternalEntitlement + public static ConstructorAndMethodEntitlement create(String s) { + return new ConstructorAndMethodEntitlement(s); + } + + @ExternalEntitlement + public ConstructorAndMethodEntitlement(String s) {} + } + + public static class NonStaticMethodEntitlement implements Entitlement { + @ExternalEntitlement + public NonStaticMethodEntitlement create() { + return new NonStaticMethodEntitlement(); + } + } + public void testGetEntitlementTypeName() { assertEquals("create_class_loader", PolicyParser.getEntitlementTypeName(CreateClassLoaderEntitlement.class)); @@ -55,7 +84,7 @@ public class PolicyParserTests extends ESTestCase { .parsePolicy(); Policy expected = new Policy( "test-policy.yaml", - List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", "read_write")))) + List.of(new Scope("entitlement-module-name", List.of(FileEntitlement.create("test/path/to/file", "read_write")))) ); assertEquals(expected, parsedPolicy); } @@ -65,7 +94,7 @@ public class PolicyParserTests extends ESTestCase { .parsePolicy(); Policy expected = new Policy( "test-policy.yaml", - List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", "read_write")))) + List.of(new Scope("entitlement-module-name", List.of(FileEntitlement.create("test/path/to/file", "read_write")))) ); assertEquals(expected, parsedPolicy); } @@ -174,4 +203,60 @@ public class PolicyParserTests extends ESTestCase { ) ); } + + public void testMultipleMethodsAnnotated() throws IOException { + var parser = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - many_methods + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", true, Map.of("many_methods", ManyMethodsEntitlement.class)); + + var e = expectThrows(IllegalStateException.class, parser::parsePolicy); + assertThat( + e.getMessage(), + equalTo( + "entitlement class " + + "[org.elasticsearch.entitlement.runtime.policy.PolicyParserTests$ManyMethodsEntitlement]" + + " has more than one constructor and/or method annotated with ExternalEntitlement" + ) + ); + } + + public void testConstructorAndMethodAnnotated() throws IOException { + var parser = new PolicyParser( + new ByteArrayInputStream(""" + entitlement-module-name: + - constructor_and_method + """.getBytes(StandardCharsets.UTF_8)), + "test-policy.yaml", + true, + Map.of("constructor_and_method", ConstructorAndMethodEntitlement.class) + ); + + var e = expectThrows(IllegalStateException.class, parser::parsePolicy); + 
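The "food" assertions in FileAccessTreeTests above pin down what the FileAccessTree hunk earlier in this diff fixes: with a plain lexicographic prefix test, "food" would wrongly count as living under the entitled path "foo". A self-contained sketch of the sorted-array containment check follows, with the separator hard-coded to '/' for brevity; the class name is illustrative, not from the PR.

import java.util.Arrays;

// Sketch: entitled paths are kept sorted; a query path is allowed if it matches an
// entry exactly, or extends an entry with a separator immediately after it.
class PathPrefixSketch {
    static boolean checkPath(String path, String[] sortedPaths) {
        int ndx = Arrays.binarySearch(sortedPaths, path);
        if (ndx < -1) {
            // Not found: the entry just before the insertion point is the candidate
            // parent checked here; require a '/' right after it so "food" is rejected.
            String maybeParent = sortedPaths[-ndx - 2];
            return path.startsWith(maybeParent) && path.startsWith("/", maybeParent.length());
        }
        return ndx >= 0; // exact match
    }

    public static void main(String[] args) {
        String[] paths = { "/tmp/foo" }; // already sorted
        System.out.println(checkPath("/tmp/foo", paths));        // true: exact match
        System.out.println(checkPath("/tmp/foo/subdir", paths)); // true: child of /tmp/foo
        System.out.println(checkPath("/tmp/food", paths));       // false: sibling, not a child
    }
}

Only the immediate predecessor of the binary-search insertion point is examined, mirroring checkPath in the hunk above.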
assertThat( + e.getMessage(), + equalTo( + "entitlement class " + + "[org.elasticsearch.entitlement.runtime.policy.PolicyParserTests$ConstructorAndMethodEntitlement]" + + " has more than one constructor and/or method annotated with ExternalEntitlement" + ) + ); + } + + public void testNonStaticMethodAnnotated() throws IOException { + var parser = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - non_static + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", true, Map.of("non_static", NonStaticMethodEntitlement.class)); + + var e = expectThrows(IllegalStateException.class, parser::parsePolicy); + assertThat( + e.getMessage(), + equalTo( + "entitlement class " + + "[org.elasticsearch.entitlement.runtime.policy.PolicyParserTests$NonStaticMethodEntitlement]" + + " has non-static method annotated with ExternalEntitlement" + ) + ); + } } diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index 173e1eeef60a..0c8821f29dbf 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -20,7 +20,7 @@ esplugin { restResources { restApi { - include '_common', 'indices', 'index', 'cluster', 'search', 'nodes', 'bulk', 'termvectors', 'explain', 'count' + include '_common', 'indices', 'index', 'cluster', 'search', 'nodes', 'bulk', 'termvectors', 'explain', 'count', 'capabilities' } } diff --git a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java index 40a7b64bc91e..a385db95d882 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java @@ -207,7 +207,7 @@ public class ReloadAnalyzerTests extends ESSingleNodeTestCase { public void testUpdateableSynonymsRejectedAtIndexTime() throws FileNotFoundException, IOException { String synonymsFileName = "synonyms.txt"; setupResourceFile(synonymsFileName, "foo, baz"); - Path configDir = node().getEnvironment().configFile(); + Path configDir = node().getEnvironment().configDir(); if (Files.exists(configDir) == false) { Files.createDirectory(configDir); } @@ -319,7 +319,7 @@ public class ReloadAnalyzerTests extends ESSingleNodeTestCase { } private Path setupResourceFile(String fileName, String... 
content) throws IOException { - Path configDir = node().getEnvironment().configFile(); + Path configDir = node().getEnvironment().configDir(); if (Files.exists(configDir) == false) { Files.createDirectory(configDir); } diff --git a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java index 8209d9f543a3..06f19c0d60db 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java @@ -57,7 +57,7 @@ public class ReloadSynonymAnalyzerIT extends ESIntegTestCase { } private void testSynonymsUpdate(boolean preview) throws FileNotFoundException, IOException, InterruptedException { - Path config = internalCluster().getInstance(Environment.class).configFile(); + Path config = internalCluster().getInstance(Environment.class).configDir(); String synonymsFileName = "synonyms.txt"; Path synonymsFile = config.resolve(synonymsFileName); writeFile(synonymsFile, "foo, baz"); @@ -106,7 +106,7 @@ public class ReloadSynonymAnalyzerIT extends ESIntegTestCase { final String synonymsFileName = "synonyms.txt"; final String fieldName = "field"; - Path config = internalCluster().getInstance(Environment.class).configFile(); + Path config = internalCluster().getInstance(Environment.class).configDir(); Path synonymsFile = config.resolve(synonymsFileName); writeFile(synonymsFile, "foo, baz"); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java index e091f0175009..92e2b3085cc2 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java @@ -40,7 +40,7 @@ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundW throw new IllegalArgumentException("hyphenation_patterns_path is a required setting."); } - Path hyphenationPatternsFile = env.configFile().resolve(hyphenationPatternsPath); + Path hyphenationPatternsFile = env.configDir().resolve(hyphenationPatternsPath); try { InputStream in = Files.newInputStream(hyphenationPatternsFile); diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml index 971f530cebeb..24e04174cd1e 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml @@ -59,3 +59,28 @@ - match: { detail.tokenizer.tokens.0.token: ABc } - match: { detail.tokenfilters.0.name: lowercase } - match: { detail.tokenfilters.0.tokens.0.token: abc } + +--- +"Custom analyzer is not buildable": + - requires: + test_runner_features: [ capabilities ] + reason: This capability required to run test + capabilities: + - method: GET + path: /_analyze + capabilities: [ wrong_custom_analyzer_returns_400 ] + + - do: + catch: bad_request + 
indices.analyze: + body: + text: the foxes jumping quickly + tokenizer: + standard + filter: + type: hunspell + locale: en_US + + - match: { status: 400 } + - match: { error.type: illegal_argument_exception } + - match: { error.reason: "Can not build a custom analyzer" } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java index e3d767c92a68..e1fcfdec7b03 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java @@ -251,7 +251,7 @@ public class DataStreamGetWriteIndexTests extends ESTestCase { MetadataCreateIndexService createIndexService; { Environment env = mock(Environment.class); - when(env.sharedDataFile()).thenReturn(null); + when(env.sharedDataDir()).thenReturn(null); AllocationService allocationService = mock(AllocationService.class); when(allocationService.reroute(any(ClusterState.class), any(String.class), any())).then(i -> i.getArguments()[0]); when(allocationService.getShardRoutingRoleStrategy()).thenReturn(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 4d70a83c6752..3bcc9f435534 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -664,7 +664,7 @@ public class GeoIpDownloaderIT extends AbstractGeoIpIT { .map(DiscoveryNode::getId) .collect(Collectors.toSet()); // All nodes share the same geoip base dir in the shared tmp dir: - Path geoipBaseTmpDir = internalCluster().getDataNodeInstance(Environment.class).tmpFile().resolve("geoip-databases"); + Path geoipBaseTmpDir = internalCluster().getDataNodeInstance(Environment.class).tmpDir().resolve("geoip-databases"); assertThat(Files.exists(geoipBaseTmpDir), is(true)); final List<Path> geoipTmpDirs; try (Stream<Path> files = Files.list(geoipBaseTmpDir)) { @@ -676,7 +676,7 @@ public class GeoIpDownloaderIT extends AbstractGeoIpIT { private void setupDatabasesInConfigDirectory() throws Exception { StreamSupport.stream(internalCluster().getInstances(Environment.class).spliterator(), false) - .map(Environment::configFile) + .map(Environment::configDir) .map(path -> path.resolve("ingest-geoip")) .distinct() .forEach(path -> { @@ -704,7 +704,7 @@ public class GeoIpDownloaderIT extends AbstractGeoIpIT { private void deleteDatabasesInConfigDirectory() throws Exception { StreamSupport.stream(internalCluster().getInstances(Environment.class).spliterator(), false) - .map(Environment::configFile) + .map(Environment::configDir) .map(path -> path.resolve("ingest-geoip")) .distinct() .forEach(path -> { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java index 3d2b54b04695..289008236a85 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java @@ -42,7 +42,7 @@ final class
ConfigDatabases implements Closeable { private final ConcurrentMap<String, DatabaseReaderLazyLoader> configDatabases; ConfigDatabases(Environment environment, GeoIpCache cache) { - this(environment.configFile().resolve("ingest-geoip"), cache); + this(environment.configDir().resolve("ingest-geoip"), cache); } ConfigDatabases(Path geoipConfigDir, GeoIpCache cache) { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java index ba4ec9e5fb07..614a81da08f4 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java @@ -116,7 +116,7 @@ public final class DatabaseNodeService implements IpDatabaseProvider { ClusterService clusterService ) { this( - environment.tmpFile(), + environment.tmpDir(), new OriginSettingClient(client, IngestService.INGEST_ORIGIN), cache, new ConfigDatabases(environment, cache), diff --git a/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java b/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java index fd1e5f2f6247..902114234096 100644 --- a/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java +++ b/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java @@ -41,7 +41,7 @@ public class IngestUserAgentPlugin extends Plugin implements IngestPlugin { @Override public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) { - Path userAgentConfigDirectory = parameters.env.configFile().resolve("ingest-user-agent"); + Path userAgentConfigDirectory = parameters.env.configDir().resolve("ingest-user-agent"); if (Files.exists(userAgentConfigDirectory) == false && Files.isDirectory(userAgentConfigDirectory)) { throw new IllegalStateException( diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexSslConfig.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexSslConfig.java index 914311e1190c..ceba20570e7e 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexSslConfig.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexSslConfig.java @@ -106,7 +106,7 @@ public class ReindexSslConfig { return settings.getAsList(key); } }; - configuration = loader.load(environment.configFile()); + configuration = loader.load(environment.configDir()); reload(); final FileChangesListener listener = new FileChangesListener() { diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index a8a6986ccbb7..f1369bae6e30 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -369,7 +369,7 @@ class S3Service implements Closeable { } // Make sure that a readable symlink to the token file exists in the plugin config directory // AWS_WEB_IDENTITY_TOKEN_FILE exists but we only use Web Identity Tokens if a corresponding symlink exists and is readable - Path webIdentityTokenFileSymlink = environment.configFile().resolve(WEB_IDENTITY_TOKEN_FILE_LOCATION); + Path webIdentityTokenFileSymlink =
environment.configDir().resolve(WEB_IDENTITY_TOKEN_FILE_LOCATION); if (Files.exists(webIdentityTokenFileSymlink) == false) { LOGGER.warn( "Cannot use AWS Web Identity Tokens: AWS_WEB_IDENTITY_TOKEN_FILE is defined but no corresponding symlink exists " diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java index 69fd0c0f5d6a..2698eb718ded 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java @@ -65,7 +65,7 @@ public class CustomWebIdentityTokenCredentialsProviderTests extends ESTestCase { Files.createDirectory(configDirectory.resolve("repository-s3")); Files.writeString(configDirectory.resolve("repository-s3/aws-web-identity-token-file"), "YXdzLXdlYi1pZGVudGl0eS10b2tlbi1maWxl"); Environment environment = Mockito.mock(Environment.class); - Mockito.when(environment.configFile()).thenReturn(configDirectory); + Mockito.when(environment.configDir()).thenReturn(configDirectory); return environment; } @@ -212,7 +212,7 @@ public class CustomWebIdentityTokenCredentialsProviderTests extends ESTestCase { latch.countDown(); } }); - Files.writeString(environment.configFile().resolve("repository-s3/aws-web-identity-token-file"), newWebIdentityToken); + Files.writeString(environment.configDir().resolve("repository-s3/aws-web-identity-token-file"), newWebIdentityToken); safeAwait(latch); assertCredentials(awsCredentialsProvider.getCredentials()); diff --git a/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java b/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java index 51a223315644..eca846f955bf 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java @@ -158,7 +158,7 @@ public class URLRepository extends BlobStoreRepository { if (normalizedUrl == null) { String logMessage = "The specified url [{}] doesn't start with any repository paths specified by the " + "path.repo setting or by {} setting: [{}] "; - logger.warn(logMessage, urlToCheck, ALLOWED_URLS_SETTING.getKey(), environment.repoFiles()); + logger.warn(logMessage, urlToCheck, ALLOWED_URLS_SETTING.getKey(), environment.repoDirs()); String exceptionMessage = "file url [" + urlToCheck + "] doesn't match any of the locations specified by path.repo or " diff --git a/muted-tests.yml b/muted-tests.yml index 2326972a83c8..1952e34c0897 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -134,6 +134,12 @@ tests: - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/120_data_streams_stats/Multiple data stream} issue: https://github.com/elastic/elasticsearch/issues/118217 + # TODO: re-enable after backporting https://github.com/elastic/elasticsearch/pull/119110 +- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT + method: test {yaml=update/100_synthetic_source/keyword} + # TODO: re-enable after backporting https://github.com/elastic/elasticsearch/pull/119110 +- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT + method: test {yaml=update/100_synthetic_source/stored text} - 
class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests method: testSearcherId issue: https://github.com/elastic/elasticsearch/issues/118374 @@ -385,9 +391,11 @@ tests: - class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryStopIT method: testStopQueryLocal issue: https://github.com/elastic/elasticsearch/issues/121672 -- class: org.elasticsearch.xpack.esql.heap_attack.HeapAttackIT - method: testLookupExplosionBigStringManyMatches - issue: https://github.com/elastic/elasticsearch/issues/121465 +- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT + issue: https://github.com/elastic/elasticsearch/issues/121411 +- class: org.elasticsearch.transport.InboundHandlerTests + method: testLogsSlowInboundProcessing + issue: https://github.com/elastic/elasticsearch/issues/121816 # Examples: # diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java index fe0b3a00b2bb..6854984c49c2 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java @@ -51,7 +51,7 @@ public class IcuCollationTokenFilterFactory extends AbstractTokenFilterFactory { if (rules != null) { Exception failureToResolve = null; try { - rules = Streams.copyToString(Files.newBufferedReader(environment.configFile().resolve(rules), Charset.forName("UTF-8"))); + rules = Streams.copyToString(Files.newBufferedReader(environment.configDir().resolve(rules), Charset.forName("UTF-8"))); } catch (IOException | SecurityException | InvalidPathException e) { failureToResolve = e; } diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java index c66d25ffa2f3..4a0ead6a893e 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java @@ -99,7 +99,7 @@ public class IcuTokenizerFactory extends AbstractTokenizerFactory { // parse a single RBBi rule file private static BreakIterator parseRules(String filename, Environment env) throws IOException { - final Path path = env.configFile().resolve(filename); + final Path path = env.configDir().resolve(filename); String rules = Files.readAllLines(path).stream().filter((v) -> v.startsWith("#") == false).collect(Collectors.joining("\n")); return new RuleBasedBreakIterator(rules.toString()); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java index ce6acd79a0bb..e74d1a87959f 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java @@ -81,7 +81,7 @@ class HdfsSecurityContext { * Expects keytab file to exist at {@code $CONFIG_DIR$/repository-hdfs/krb5.keytab} */ static Path locateKeytabFile(Environment environment) { - Path keytabPath = environment.configFile().resolve("repository-hdfs").resolve("krb5.keytab"); + Path 
keytabPath = environment.configDir().resolve("repository-hdfs").resolve("krb5.keytab"); try { if (Files.exists(keytabPath) == false) { throw new RuntimeException("Could not locate keytab at [" + keytabPath + "]."); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java index 08d7e3b45702..bc8308f48e52 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java @@ -103,23 +103,23 @@ public class EvilSecurityTests extends ESTestCase { // check that all directories got permissions: // bin file: ro - assertExactPermissions(new FilePermission(environment.binFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.binDir().toString(), "read,readlink"), permissions); // lib file: ro - assertExactPermissions(new FilePermission(environment.libFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.libDir().toString(), "read,readlink"), permissions); // modules file: ro - assertExactPermissions(new FilePermission(environment.modulesFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.modulesDir().toString(), "read,readlink"), permissions); // config file: ro - assertExactPermissions(new FilePermission(environment.configFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.configDir().toString(), "read,readlink"), permissions); // plugins: ro - assertExactPermissions(new FilePermission(environment.pluginsFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.pluginsDir().toString(), "read,readlink"), permissions); // data paths: r/w - for (Path dataPath : environment.dataFiles()) { + for (Path dataPath : environment.dataDirs()) { assertExactPermissions(new FilePermission(dataPath.toString(), "read,readlink,write,delete"), permissions); } - assertExactPermissions(new FilePermission(environment.sharedDataFile().toString(), "read,readlink,write,delete"), permissions); + assertExactPermissions(new FilePermission(environment.sharedDataDir().toString(), "read,readlink,write,delete"), permissions); // logs: r/w - assertExactPermissions(new FilePermission(environment.logsFile().toString(), "read,readlink,write,delete"), permissions); + assertExactPermissions(new FilePermission(environment.logsDir().toString(), "read,readlink,write,delete"), permissions); // temp dir: r/w assertExactPermissions(new FilePermission(fakeTmpDir.toString(), "read,readlink,write,delete"), permissions); } diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 168493eb52f6..5c26a744b2fb 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -80,8 +80,8 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { Environment environment = TestEnvironment.newEnvironment(settings); // This plugin will NOT have a controller daemon - Path plugin = environment.modulesFile().resolve("a_plugin"); - Files.createDirectories(environment.modulesFile()); + 
Path plugin = environment.modulesDir().resolve("a_plugin"); + Files.createDirectories(environment.modulesDir()); Files.createDirectories(plugin); PluginTestUtil.writePluginProperties( plugin, @@ -111,8 +111,8 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { * Two plugins - one with a controller daemon and one without. */ public void testControllerSpawn() throws Exception { - assertControllerSpawns(Environment::pluginsFile, false); - assertControllerSpawns(Environment::modulesFile, true); + assertControllerSpawns(Environment::pluginsDir, false); + assertControllerSpawns(Environment::modulesDir, true); } private void assertControllerSpawns(final Function pluginsDirFinder, boolean expectSpawn) throws Exception { @@ -131,8 +131,8 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { // this plugin will have a controller daemon Path plugin = pluginsDirFinder.apply(environment).resolve("test_plugin"); - Files.createDirectories(environment.modulesFile()); - Files.createDirectories(environment.pluginsFile()); + Files.createDirectories(environment.modulesDir()); + Files.createDirectories(environment.pluginsDir()); Files.createDirectories(plugin); PluginTestUtil.writePluginProperties( plugin, @@ -217,7 +217,7 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { Environment environment = TestEnvironment.newEnvironment(settings); - Path plugin = environment.modulesFile().resolve("test_plugin"); + Path plugin = environment.modulesDir().resolve("test_plugin"); Files.createDirectories(plugin); PluginTestUtil.writePluginProperties( plugin, @@ -250,10 +250,10 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { final Environment environment = TestEnvironment.newEnvironment(settings); - Files.createDirectories(environment.modulesFile()); - Files.createDirectories(environment.pluginsFile()); + Files.createDirectories(environment.modulesDir()); + Files.createDirectories(environment.pluginsDir()); - final Path desktopServicesStore = environment.modulesFile().resolve(".DS_Store"); + final Path desktopServicesStore = environment.modulesDir().resolve(".DS_Store"); Files.createFile(desktopServicesStore); final Spawner spawner = new Spawner(); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index 30367bf55d8c..c0f12f95269e 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -82,8 +82,7 @@ public class DesiredNodesUpgradeIT extends AbstractRollingUpgradeTestCase { Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), randomDoubleProcessorCount(), ByteSizeValue.ofGb(randomIntBetween(10, 24)), - ByteSizeValue.ofGb(randomIntBetween(128, 256)), - null + ByteSizeValue.ofGb(randomIntBetween(128, 256)) ) ) .toList(); @@ -94,8 +93,7 @@ public class DesiredNodesUpgradeIT extends AbstractRollingUpgradeTestCase { Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), new DesiredNode.ProcessorsRange(minProcessors, minProcessors + randomIntBetween(10, 20)), ByteSizeValue.ofGb(randomIntBetween(10, 24)), - ByteSizeValue.ofGb(randomIntBetween(128, 256)), - null + ByteSizeValue.ofGb(randomIntBetween(128, 256)) ); }).toList(); } diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java 
b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java index b1e28de1a526..683990d51d4a 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java @@ -10,7 +10,6 @@ package org.elasticsearch.http.snapshots; import org.apache.http.client.methods.HttpGet; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; @@ -37,7 +36,6 @@ import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -516,10 +514,9 @@ public class RestGetSnapshotsIT extends AbstractSnapshotRestTestCase { true, (args) -> new GetSnapshotsResponse( (List) args[0], - (Map) args[1], - (String) args[2], - args[3] == null ? UNKNOWN_COUNT : (int) args[3], - args[4] == null ? UNKNOWN_COUNT : (int) args[4] + (String) args[1], + args[2] == null ? UNKNOWN_COUNT : (int) args[2], + args[3] == null ? UNKNOWN_COUNT : (int) args[3] ) ); @@ -529,11 +526,6 @@ public class RestGetSnapshotsIT extends AbstractSnapshotRestTestCase { (p, c) -> SnapshotInfoUtils.snapshotInfoFromXContent(p), new ParseField("snapshots") ); - GET_SNAPSHOT_PARSER.declareObject( - ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> p.map(HashMap::new, ElasticsearchException::fromXContent), - new ParseField("failures") - ); GET_SNAPSHOT_PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("next")); GET_SNAPSHOT_PARSER.declareIntOrNull(ConstructingObjectParser.optionalConstructorArg(), UNKNOWN_COUNT, new ParseField("total")); GET_SNAPSHOT_PARSER.declareIntOrNull(ConstructingObjectParser.optionalConstructorArg(), UNKNOWN_COUNT, new ParseField("remaining")); diff --git a/qa/smoke-test-multinode/build.gradle b/qa/smoke-test-multinode/build.gradle index cc71a99562eb..14e102025cac 100644 --- a/qa/smoke-test-multinode/build.gradle +++ b/qa/smoke-test-multinode/build.gradle @@ -28,5 +28,7 @@ tasks.named("yamlRestTest").configure { 'cat.templates/10_basic/No templates', 'cat.templates/10_basic/Sort templates', 'cat.templates/10_basic/Multiple template', + 'update/100_synthetic_source/keyword', + 'update/100_synthetic_source/stored text' ].join(',') } diff --git a/renovate.json b/renovate.json index 29406c488f94..53919e027dc7 100644 --- a/renovate.json +++ b/renovate.json @@ -30,8 +30,8 @@ "matchDatasources": [ "docker" ], - "matchPackagePatterns": [ - "^docker.elastic.co/wolfi/chainguard-base$" + "matchPackageNames": [ + "/^docker.elastic.co/wolfi/chainguard-base$/" ] } ], diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 67f0b5a0714a..3bdaf029c364 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -74,4 +74,12 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("index/91_metrics_no_subobjects/Metrics object indexing with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("index/91_metrics_no_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") 
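For reference, the GET_SNAPSHOT_PARSER rewrite in RestGetSnapshotsIT above can be read as the following sketch (the generic parameters and the parser name are assumptions, not taken from the test): once the deprecated "failures" map is dropped, every optional constructor argument shifts down one position, and absent counts fall back to UNKNOWN_COUNT. The declare* registrations shown in the diff are unchanged apart from the removed failures field.

```java
// Sketch only: parser name and generics are assumed; mirrors the rewritten test parser.
ConstructingObjectParser<GetSnapshotsResponse, Void> parser = new ConstructingObjectParser<>(
    "get_snapshots_response",
    true,
    args -> new GetSnapshotsResponse(
        (List<SnapshotInfo>) args[0],                     // "snapshots"
        (String) args[1],                                 // "next"
        args[2] == null ? UNKNOWN_COUNT : (int) args[2],  // "total"
        args[3] == null ? UNKNOWN_COUNT : (int) args[3]   // "remaining"
    )
);
```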
task.skipTest("indices.create/20_synthetic_source/synthetic_source with copy_to inside nested object", "temporary until backported") + task.skipTest( + "cluster.desired_nodes/10_basic/Test delete desired nodes with node_version generates a warning", + "node_version warning is removed in 9.0" + ) + task.skipTest( + "cluster.desired_nodes/10_basic/Test update desired nodes with node_version generates a warning", + "node_version warning is removed in 9.0" + ) }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml index 1d1aa524ffb2..a45146a4e147 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml @@ -59,61 +59,6 @@ teardown: - contains: { nodes: { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb" } } - contains: { nodes: { settings: { node: { name: "instance-000188" } }, processors: 16.0, memory: "128gb", storage: "1tb" } } --- -"Test update desired nodes with node_version generates a warning": - - skip: - reason: "contains is a newly added assertion" - features: ["contains", "allowed_warnings"] - - do: - cluster.state: {} - - # Get master node id - - set: { master_node: master } - - - do: - nodes.info: {} - - set: { nodes.$master.version: es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - allowed_warnings: - - "[version removal] Specifying node_version in desired nodes requests is deprecated." - - match: { replaced_existing_history_id: false } - - - do: - _internal.get_desired_nodes: {} - - match: - $body: - history_id: "test" - version: 1 - nodes: - - { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 2 - body: - nodes: - - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - - { settings: { "node.name": "instance-000188" }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } - allowed_warnings: - - "[version removal] Specifying node_version in desired nodes requests is deprecated." 
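The YAML coverage removed here exercised the deprecated node_version field; the Java-side counterpart is the DesiredNodesUpgradeIT change earlier in this patch, where the trailing version argument (previously passed as null) is dropped from the DesiredNode construction. A rough sketch of the resulting shape, using the helpers visible in that test:

```java
// Hypothetical sketch of the test's DesiredNode construction after the
// node_version cleanup; the trailing version argument is no longer accepted.
DesiredNode node = new DesiredNode(
    Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(),
    randomDoubleProcessorCount(),                   // processors
    ByteSizeValue.ofGb(randomIntBetween(10, 24)),   // memory
    ByteSizeValue.ofGb(randomIntBetween(128, 256))  // storage
);
```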
- - match: { replaced_existing_history_id: false } - - - do: - _internal.get_desired_nodes: {} - - - match: { history_id: "test" } - - match: { version: 2 } - - length: { nodes: 2 } - - contains: { nodes: { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } } - - contains: { nodes: { settings: { node: { name: "instance-000188" } }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } } ---- "Test update move to a new history id": - skip: reason: "contains is a newly added assertion" @@ -199,46 +144,6 @@ teardown: _internal.get_desired_nodes: {} - match: { status: 404 } --- -"Test delete desired nodes with node_version generates a warning": - - skip: - features: allowed_warnings - - do: - cluster.state: {} - - - set: { master_node: master } - - - do: - nodes.info: {} - - set: { nodes.$master.version: es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - allowed_warnings: - - "[version removal] Specifying node_version in desired nodes requests is deprecated." - - match: { replaced_existing_history_id: false } - - - do: - _internal.get_desired_nodes: {} - - match: - $body: - history_id: "test" - version: 1 - nodes: - - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - - - do: - _internal.delete_desired_nodes: {} - - - do: - catch: missing - _internal.get_desired_nodes: {} - - match: { status: 404 } ---- "Test update desired nodes is idempotent": - skip: reason: "contains is a newly added assertion" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/100_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/100_synthetic_source.yml index f4894692b6ca..219bc52c4e28 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/100_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/100_synthetic_source.yml @@ -6,8 +6,8 @@ setup: --- keyword: - requires: - cluster_features: ["gte_v8.4.0"] - reason: introduced in 8.4.0 + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source - do: indices.create: @@ -60,13 +60,14 @@ keyword: index: test run_expensive_tasks: true - is_false: test.fields._source - - is_true: test.fields._recovery_source + # When synthetic source is used there is no _recovery_source field + - match: { test.fields._recovery_source: null } --- stored text: - requires: - cluster_features: ["gte_v8.5.0"] - reason: introduced in 8.5.0 + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source - do: indices.create: @@ -121,4 +122,5 @@ stored text: index: test run_expensive_tasks: true - is_false: test.fields._source - - is_true: test.fields._recovery_source + # When synthetic source is used there is no _recovery_source field + - match: { test.fields._recovery_source: null } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java index a5aa39f5feb1..83e79ff7f45a 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java @@ -85,7 +85,7 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { final Environment environment = internalCluster().getInstance(Environment.class); final AtomicReference reloadSettingsError = new AtomicReference<>(); // keystore file should be missing for this test case - Files.deleteIfExists(KeyStoreWrapper.keystorePath(environment.configFile())); + Files.deleteIfExists(KeyStoreWrapper.keystorePath(environment.configDir())); final int initialReloadCount = mockReloadablePlugin.getReloadCount(); final CountDownLatch latch = new CountDownLatch(1); executeReloadSecureSettings(Strings.EMPTY_ARRAY, emptyPassword(), new ActionListener<>() { @@ -129,10 +129,10 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { final int initialReloadCount = mockReloadablePlugin.getReloadCount(); // invalid "keystore" file should be present in the config dir try (InputStream keystore = ReloadSecureSettingsIT.class.getResourceAsStream("invalid.txt.keystore")) { - if (Files.exists(environment.configFile()) == false) { - Files.createDirectory(environment.configFile()); + if (Files.exists(environment.configDir()) == false) { + Files.createDirectory(environment.configDir()); } - Files.copy(keystore, KeyStoreWrapper.keystorePath(environment.configFile()), StandardCopyOption.REPLACE_EXISTING); + Files.copy(keystore, KeyStoreWrapper.keystorePath(environment.configDir()), StandardCopyOption.REPLACE_EXISTING); } final CountDownLatch latch = new CountDownLatch(1); executeReloadSecureSettings(Strings.EMPTY_ARRAY, emptyPassword(), new ActionListener<>() { @@ -363,7 +363,7 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create()) { keyStoreWrapper.setString(VALID_SECURE_SETTING_NAME, new char[0]); - keyStoreWrapper.save(environment.configFile(), new char[0], false); + keyStoreWrapper.save(environment.configDir(), new char[0], false); } PlainActionFuture actionFuture = new PlainActionFuture<>(); @@ -374,7 +374,7 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create()) { assertThat(keyStoreWrapper, notNullValue()); keyStoreWrapper.setString("some.setting.that.does.not.exist", new char[0]); - keyStoreWrapper.save(environment.configFile(), new char[0], false); + keyStoreWrapper.save(environment.configDir(), new char[0], false); } actionFuture = new PlainActionFuture<>(); @@ -432,7 +432,7 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { private SecureSettings writeEmptyKeystore(Environment environment, char[] password) throws Exception { final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create(); - keyStoreWrapper.save(environment.configFile(), password, false); + keyStoreWrapper.save(environment.configDir(), password, false); return keyStoreWrapper; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java index 492912b5cd54..a0b158ed34a5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -220,7 +220,7 @@ public class IndexShardIT extends ESSingleNodeTestCase { public void 
testIndexDirIsDeletedWhenShardRemoved() throws Exception { Environment env = getInstanceFromNode(Environment.class); - Path idxPath = env.sharedDataFile().resolve(randomAlphaOfLength(10)); + Path idxPath = env.sharedDataDir().resolve(randomAlphaOfLength(10)); logger.info("--> idxPath: [{}]", idxPath); Settings idxSettings = Settings.builder().put(IndexMetadata.SETTING_DATA_PATH, idxPath).build(); createIndex("test", idxSettings); @@ -254,7 +254,7 @@ public class IndexShardIT extends ESSingleNodeTestCase { public void testIndexCanChangeCustomDataPath() throws Exception { final String index = "test-custom-data-path"; - final Path sharedDataPath = getInstanceFromNode(Environment.class).sharedDataFile().resolve(randomAsciiLettersOfLength(10)); + final Path sharedDataPath = getInstanceFromNode(Environment.class).sharedDataDir().resolve(randomAsciiLettersOfLength(10)); final Path indexDataPath = sharedDataPath.resolve("start-" + randomAsciiLettersOfLength(10)); logger.info("--> creating index [{}] with data_path [{}]", index, indexDataPath); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java index 4e9e4b4d641d..b9513dfb9518 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java @@ -32,7 +32,6 @@ import org.elasticsearch.cli.ProcessInfo; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; @@ -531,8 +530,7 @@ public class RemoveCorruptedShardDataCommandIT extends ESIntegTestCase { nodeNameToNodeId.put(cursor.getValue().getName(), cursor.getKey()); } - final GroupShardsIterator shardIterators = state.getRoutingTable() - .activePrimaryShardsGrouped(new String[] { indexName }, false); + final List shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[] { indexName }, false); final List iterators = iterableAsArrayList(shardIterators); final ShardRouting shardRouting = iterators.iterator().next().nextOrNull(); assertThat(shardRouting, notNullValue()); @@ -562,7 +560,7 @@ public class RemoveCorruptedShardDataCommandIT extends ESIntegTestCase { command.findAndProcessShardPath( options, environmentByNodeName.get(nodeName), - environmentByNodeName.get(nodeName).dataFiles(), + environmentByNodeName.get(nodeName).dataDirs(), state, shardPath -> assertThat(shardPath.resolveIndex(), equalTo(indexPath)) ); @@ -571,8 +569,7 @@ public class RemoveCorruptedShardDataCommandIT extends ESIntegTestCase { private Path getPathToShardData(String indexName, String dirSuffix) { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator shardIterators = state.getRoutingTable() - .activePrimaryShardsGrouped(new String[] { indexName }, false); + List shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[] { indexName }, false); List iterators = iterableAsArrayList(shardIterators); ShardIterator shardIterator = RandomPicks.randomFrom(random(), iterators); 
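The migration pattern in these tests is uniform: call sites that previously received a GroupShardsIterator now work against a plain java.util.List of shard iterators, since only size() and iteration were ever used. A minimal sketch, with element types assumed from the surrounding loops:

```java
// Minimal sketch of the migrated call-site pattern; element types are assumed
// from the loops in these tests. The wrapper type is gone, a List is enough.
List<ShardIterator> shardIterators = state.getRoutingTable()
    .activePrimaryShardsGrouped(new String[] { indexName }, false);
for (ShardIterator copies : shardIterators) {
    ShardRouting routing;
    while ((routing = copies.nextOrNull()) != null) {
        // each ShardIterator still walks the copies of a single shard in turn
    }
}
```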
ShardRouting shardRouting = shardIterator.nextOrNull(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java index 3a0475705cd8..17ca4be747cb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -34,7 +34,6 @@ import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; @@ -311,8 +310,7 @@ public class CorruptedFileIT extends ESIntegTestCase { } assertThat(response.getStatus(), is(ClusterHealthStatus.RED)); ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator shardIterators = state.getRoutingTable() - .activePrimaryShardsGrouped(new String[] { "test" }, false); + List shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[] { "test" }, false); for (ShardIterator iterator : shardIterators) { ShardRouting routing; while ((routing = iterator.nextOrNull()) != null) { @@ -667,7 +665,7 @@ public class CorruptedFileIT extends ESIntegTestCase { private int numShards(String... index) { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(index, false); + List shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(index, false); return shardIterators.size(); } @@ -695,8 +693,7 @@ public class CorruptedFileIT extends ESIntegTestCase { private ShardRouting corruptRandomPrimaryFile(final boolean includePerCommitFiles) throws IOException { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); Index test = state.metadata().getProject().index("test").getIndex(); - GroupShardsIterator shardIterators = state.getRoutingTable() - .activePrimaryShardsGrouped(new String[] { "test" }, false); + List shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[] { "test" }, false); List iterators = iterableAsArrayList(shardIterators); ShardIterator shardIterator = RandomPicks.randomFrom(random(), iterators); ShardRouting shardRouting = shardIterator.nextOrNull(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java index 5cb468da7099..9256065f0d0c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import 
org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.index.search.stats.SearchStats; @@ -24,6 +23,7 @@ import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import org.elasticsearch.test.ESIntegTestCase; import java.util.HashSet; +import java.util.List; import java.util.Set; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -146,7 +146,7 @@ public class SuggestStatsIT extends ESIntegTestCase { private Set nodeIdsWithIndex(String... indices) { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); + List allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); Set nodes = new HashSet<>(); for (ShardIterator shardIterator : allAssignedShardsGrouped) { for (ShardRouting routing : shardIterator) { @@ -161,7 +161,7 @@ public class SuggestStatsIT extends ESIntegTestCase { protected int numAssignedShards(String... indices) { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); + List allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); return allAssignedShardsGrouped.size(); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java index 2530dd35946f..ee7f76e6be3f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.settings.Settings; @@ -165,7 +164,7 @@ public class SearchStatsIT extends ESIntegTestCase { private Set nodeIdsWithIndex(String... indices) { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); + List allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); Set nodes = new HashSet<>(); for (ShardIterator shardIterator : allAssignedShardsGrouped) { for (ShardRouting routing : shardIterator) { @@ -248,7 +247,7 @@ public class SearchStatsIT extends ESIntegTestCase { protected int numAssignedShards(String... 
indices) { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); + List allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); return allAssignedShardsGrouped.size(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java index 77c7b4b762e6..c1549c1f3d38 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java @@ -195,7 +195,7 @@ public class MultiClusterRepoAccessIT extends AbstractSnapshotIntegTestCase { ); assertAcked(clusterAdmin().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName)); - IOUtils.rm(internalCluster().getCurrentMasterNodeInstance(Environment.class).resolveRepoFile(repoPath.toString())); + IOUtils.rm(internalCluster().getCurrentMasterNodeInstance(Environment.class).resolveRepoDir(repoPath.toString())); createRepository(repoName, "fs", repoPath); createFullSnapshot(repoName, "snap-1"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java index 6922b21be37f..2f4014bf4d35 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java @@ -316,7 +316,6 @@ public class SnapshotStatusApisIT extends AbstractSnapshotIntegTestCase { .get(); assertTrue(getSnapshotsResponse.getSnapshots().isEmpty()); - assertTrue(getSnapshotsResponse.getFailures().isEmpty()); } public void testGetSnapshotsMultipleRepos() throws Exception { diff --git a/server/src/main/java/org/elasticsearch/TransportVersion.java b/server/src/main/java/org/elasticsearch/TransportVersion.java index 032b10f0a30d..47cbe605b98e 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersion.java +++ b/server/src/main/java/org/elasticsearch/TransportVersion.java @@ -118,6 +118,14 @@ public record TransportVersion(int id) implements VersionId { return VersionsHolder.ALL_VERSIONS; } + /** + * @return whether this is a known {@link TransportVersion}, i.e. one declared in {@link TransportVersions}. Other versions may exist + * in the wild (they're sent over the wire by numeric ID) but we don't know how to communicate using such versions. 
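+ * (For example, {@code TransportVersion.fromId(42).isKnown()} would return {@code false} for a hypothetical id 42 that no constant in {@link TransportVersions} declares.)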
+ */ + public boolean isKnown() { + return VersionsHolder.ALL_VERSIONS_MAP.containsKey(id); + } + public static TransportVersion fromString(String str) { return TransportVersion.fromId(Integer.parseInt(str)); } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 46b64d840c61..466716680190 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -176,6 +176,10 @@ public class TransportVersions { public static final TransportVersion COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED_BACKPORT_8_X = def(8_840_0_01); public static final TransportVersion ELASTICSEARCH_9_0 = def(9_000_0_00); public static final TransportVersion COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED = def(9_001_0_00); + public static final TransportVersion REMOVE_SNAPSHOT_FAILURES = def(9_002_0_00); + public static final TransportVersion TRANSPORT_STATS_HANDLING_TIME_REQUIRED = def(9_003_0_00); + public static final TransportVersion REMOVE_DESIRED_NODE_VERSION = def(9_004_0_00); + public static final TransportVersion ESQL_DRIVER_TASK_DESCRIPTION = def(9_005_0_00); /* * WARNING: DO NOT MERGE INTO MAIN! * This is the transport version used for all multi-project changes. diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java index c84df0ddfe64..3b773ae68684 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java @@ -108,7 +108,7 @@ public class TransportNodesReloadSecureSettingsAction extends TransportNodesActi Task task ) { // We default to using an empty string as the keystore password so that we mimic pre 7.3 API behavior - try (KeyStoreWrapper keystore = KeyStoreWrapper.load(environment.configFile())) { + try (KeyStoreWrapper keystore = KeyStoreWrapper.load(environment.configDir())) { // reread keystore from config file if (keystore == null) { return new NodesReloadSecureSettingsResponse.NodeResponse( diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index 6fe883a0e69d..6b9315cd4015 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedEx import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.project.ProjectResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -37,6 +36,7 @@ import org.elasticsearch.transport.TransportService; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; @@ 
-117,7 +117,7 @@ public class TransportClusterSearchShardsAction extends TransportMasterNodeReadA } Set nodeIds = new HashSet<>(); - GroupShardsIterator groupShardsIterator = clusterService.operationRouting() + List groupShardsIterator = clusterService.operationRouting() .searchShards(project, concreteIndices, routingMap, request.preference()); ShardRouting shard; ClusterSearchShardsGroup[] groupResponses = new ClusterSearchShardsGroup[groupShardsIterator.size()]; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java index dc261177567c..2436a9e29887 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java @@ -9,7 +9,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.get; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; @@ -17,12 +17,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; -import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -35,9 +33,6 @@ public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXCo private final List snapshots; - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // always empty, can be dropped - private final Map failures; - @Nullable private final String next; @@ -45,15 +40,8 @@ public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXCo private final int remaining; - public GetSnapshotsResponse( - List snapshots, - Map failures, - @Nullable String next, - final int total, - final int remaining - ) { + public GetSnapshotsResponse(List snapshots, @Nullable String next, final int total, final int remaining) { this.snapshots = List.copyOf(snapshots); - this.failures = failures == null ? Map.of() : Map.copyOf(failures); this.next = next; this.total = total; this.remaining = remaining; @@ -61,7 +49,10 @@ public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXCo public GetSnapshotsResponse(StreamInput in) throws IOException { this.snapshots = in.readCollectionAsImmutableList(SnapshotInfo::readFrom); - this.failures = Collections.unmodifiableMap(in.readMap(StreamInput::readException)); + if (in.getTransportVersion().before(TransportVersions.REMOVE_SNAPSHOT_FAILURES)) { + // Deprecated `failures` field + in.readMap(StreamInput::readException); + } this.next = in.readOptionalString(); this.total = in.readVInt(); this.remaining = in.readVInt(); @@ -76,25 +67,11 @@ public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXCo return snapshots; } - /** - * Returns a map of repository name to {@link ElasticsearchException} for each unsuccessful response. 
- */ - public Map getFailures() { - return failures; - } - @Nullable public String next() { return next; } - /** - * Returns true if there is at least one failed response. - */ - public boolean isFailed() { - return failures.isEmpty() == false; - } - public int totalCount() { return total; } @@ -106,7 +83,10 @@ public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXCo @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(snapshots); - out.writeMap(failures, StreamOutput::writeException); + if (out.getTransportVersion().before(TransportVersions.REMOVE_SNAPSHOT_FAILURES)) { + // Deprecated `failures` field + out.writeMap(Map.of(), StreamOutput::writeException); + } out.writeOptionalString(next); out.writeVInt(total); out.writeVInt(remaining); @@ -120,18 +100,6 @@ public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXCo return b; }), Iterators.map(getSnapshots().iterator(), snapshotInfo -> snapshotInfo::toXContentExternal), Iterators.single((b, p) -> { b.endArray(); - if (failures.isEmpty() == false) { - b.startObject("failures"); - for (Map.Entry error : failures.entrySet()) { - b.field(error.getKey(), (bb, pa) -> { - bb.startObject(); - error.getValue().toXContent(bb, pa); - bb.endObject(); - return bb; - }); - } - b.endObject(); - } if (next != null) { b.field("next", next); } @@ -151,12 +119,12 @@ public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXCo if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetSnapshotsResponse that = (GetSnapshotsResponse) o; - return Objects.equals(snapshots, that.snapshots) && Objects.equals(failures, that.failures) && Objects.equals(next, that.next); + return Objects.equals(snapshots, that.snapshots) && Objects.equals(next, that.next); } @Override public int hashCode() { - return Objects.hash(snapshots, failures, next); + return Objects.hash(snapshots, next); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 896b336d54d7..ec4a578ef25c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -543,7 +543,6 @@ public class TransportGetSnapshotsAction extends TransportMasterNodeAction 0 ? sortBy.encodeAfterQueryParam(snapshotInfos.get(snapshotInfos.size() - 1)) : null, totalCount.get(), remaining diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeCapabilities.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeCapabilities.java new file mode 100644 index 000000000000..0574e05001f1 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeCapabilities.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.action.admin.indices.analyze; + +import java.util.Set; + +public final class AnalyzeCapabilities { + private AnalyzeCapabilities() {} + + private static final String WRONG_CUSTOM_ANALYZER_RETURNS_400_CAPABILITY = "wrong_custom_analyzer_returns_400"; + + public static final Set CAPABILITIES = Set.of(WRONG_CUSTOM_ANALYZER_RETURNS_400_CAPABILITY); +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 3283400059d8..2096108ee0ac 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -147,6 +147,8 @@ public class TransportAnalyzeAction extends TransportSingleShardAction shards( - ClusterState clusterState, - AnalyzeIndexDiskUsageRequest request, - String[] concreteIndices - ) { + protected List shards(ClusterState clusterState, AnalyzeIndexDiskUsageRequest request, String[] concreteIndices) { ProjectState project = projectResolver.getProjectState(clusterState); - final GroupShardsIterator groups = clusterService.operationRouting() - .searchShards(project, concreteIndices, null, null); + final List groups = clusterService.operationRouting().searchShards(project, concreteIndices, null, null); + for (ShardIterator group : groups) { // fails fast if any non-active groups if (group.size() == 0) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index 726573e10c9c..75e4f4ce4b50 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.project.ProjectResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -158,7 +157,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction< } @Override - protected GroupShardsIterator shards(ClusterState clusterState, ValidateQueryRequest request, String[] concreteIndices) { + protected List shards(ClusterState clusterState, ValidateQueryRequest request, String[] concreteIndices) { final String routing; if (request.allShards()) { routing = null; diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java index bd734d1f76fb..93095e872858 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java @@ -20,7 +20,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ProjectState; import org.elasticsearch.cluster.node.DiscoveryNode; import 
org.elasticsearch.cluster.project.ProjectResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -99,7 +98,7 @@ final class RequestDispatcher { ProjectState project = projectResolver.getProjectState(clusterState); for (String index : indices) { - final GroupShardsIterator shardIts; + final List shardIts; try { shardIts = clusterService.operationRouting().searchShards(project, new String[] { index }, null, null); } catch (Exception e) { @@ -256,7 +255,7 @@ final class RequestDispatcher { private final Set unmatchedShardIds = new HashSet<>(); private final Map failures = new HashMap<>(); - IndexSelector(GroupShardsIterator shardIts) { + IndexSelector(List shardIts) { for (ShardIterator shardIt : shardIts) { for (ShardRouting shard : shardIt) { nodeToShards.computeIfAbsent(shard.currentNodeId(), node -> new ArrayList<>()).add(shard); diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index aeea0a5d65c8..44752d6f3360 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.util.Maps; @@ -60,9 +59,9 @@ import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; /** - * This is an abstract base class that encapsulates the logic to fan out to all shards in provided {@link GroupShardsIterator} + * This is an abstract base class that encapsulates the logic to fan out to all shards in provided {@link List} * and collect the results. If a shard request returns a failure this class handles the advance to the next replica of the shard until - * the shards replica iterator is exhausted. Each shard is referenced by position in the {@link GroupShardsIterator} which is later + * the shards replica iterator is exhausted. Each shard is referenced by position in the {@link List} which is later * referred to as the {@code shardIndex}. 
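* Phase completion is tracked by counting down one outstanding slot per shard group, released when a result is consumed, the shard is skipped, or the last copy has failed (see {@code outstandingShards} below).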
* The fan out and collect algorithm is traditionally used as the initial phase which can either be a query execution or collection of * distributed frequencies @@ -90,15 +89,13 @@ abstract class AbstractSearchAsyncAction exten private final Object shardFailuresMutex = new Object(); private final AtomicBoolean hasShardResponse = new AtomicBoolean(false); private final AtomicInteger successfulOps = new AtomicInteger(); - private final AtomicInteger skippedOps = new AtomicInteger(); private final SearchTimeProvider timeProvider; private final SearchResponse.Clusters clusters; - protected final GroupShardsIterator toSkipShardsIts; - protected final GroupShardsIterator shardsIts; + protected final List toSkipShardsIts; + protected final List shardsIts; private final SearchShardIterator[] shardIterators; - private final int expectedTotalOps; - private final AtomicInteger totalOps = new AtomicInteger(); + private final AtomicInteger outstandingShards; private final int maxConcurrentRequestsPerNode; private final Map pendingExecutionsPerNode = new ConcurrentHashMap<>(); private final boolean throttleConcurrentRequests; @@ -118,7 +115,7 @@ abstract class AbstractSearchAsyncAction exten Executor executor, SearchRequest request, ActionListener listener, - GroupShardsIterator shardsIts, + List shardsIts, SearchTimeProvider timeProvider, ClusterState clusterState, SearchTask task, @@ -137,20 +134,14 @@ abstract class AbstractSearchAsyncAction exten iterators.add(iterator); } } - this.toSkipShardsIts = new GroupShardsIterator<>(toSkipIterators); - this.shardsIts = new GroupShardsIterator<>(iterators); - + this.toSkipShardsIts = toSkipIterators; + this.shardsIts = iterators; + outstandingShards = new AtomicInteger(shardsIts.size()); this.shardIterators = iterators.toArray(new SearchShardIterator[0]); // we later compute the shard index based on the natural order of the shards // that participate in the search request. This means that this number is // consistent between two requests that target the same shards. Arrays.sort(shardIterators); - - // we need to add 1 for non active partition, since we count it in the total. This means for each shard in the iterator we sum up - // it's number of active shards but use 1 as the default if no replica of a shard is active at this point. - // on a per shards level we use shardIt.remaining() to increment the totalOps pointer but add 1 for the current shard result - // we process hence we add one for the non active partition here. 
- this.expectedTotalOps = shardsIts.totalSizeWith1ForEmpty(); this.maxConcurrentRequestsPerNode = maxConcurrentRequestsPerNode; // in the case where we have fewer shards than maxConcurrentRequestsPerNode we don't need to throttle this.throttleConcurrentRequests = maxConcurrentRequestsPerNode < shardsIts.size(); @@ -179,8 +170,8 @@ abstract class AbstractSearchAsyncAction exten SearchSourceBuilder sourceBuilder ) { progressListener.notifyListShards( - SearchProgressListener.buildSearchShards(this.shardsIts), - SearchProgressListener.buildSearchShards(toSkipShardsIts), + SearchProgressListener.buildSearchShardsFromIter(this.shardsIts), + SearchProgressListener.buildSearchShardsFromIter(toSkipShardsIts), clusters, sourceBuilder == null || sourceBuilder.size() > 0, timeProvider @@ -251,9 +242,8 @@ abstract class AbstractSearchAsyncAction exten void skipShard(SearchShardIterator iterator) { successfulOps.incrementAndGet(); - skippedOps.incrementAndGet(); assert iterator.skip(); - successfulShardExecution(iterator); + successfulShardExecution(); } private static boolean assertExecuteOnStartThread() { @@ -380,7 +370,7 @@ abstract class AbstractSearchAsyncAction exten "Partial shards failure (unavailable: {}, successful: {}, skipped: {}, num-shards: {}, phase: {})", discrepancy, successfulOps.get(), - skippedOps.get(), + toSkipShardsIts.size(), getNumShards(), currentPhase ); @@ -449,17 +439,14 @@ abstract class AbstractSearchAsyncAction exten } onShardGroupFailure(shardIndex, shard, e); } - final int totalOps = this.totalOps.incrementAndGet(); - if (totalOps == expectedTotalOps) { - onPhaseDone(); - } else if (totalOps > expectedTotalOps) { - throw new AssertionError( - "unexpected higher total ops [" + totalOps + "] compared to expected [" + expectedTotalOps + "]", - new SearchPhaseExecutionException(getName(), "Shard failures", null, buildShardFailures()) - ); + if (lastShard == false) { + performPhaseOnShard(shardIndex, shardIt, nextShard); } else { - if (lastShard == false) { - performPhaseOnShard(shardIndex, shardIt, nextShard); + // count down outstanding shards, we're done with this shard as there are no more copies to try + final int outstanding = outstandingShards.decrementAndGet(); + assert outstanding >= 0 : "outstanding: " + outstanding; + if (outstanding == 0) { + onPhaseDone(); } } } @@ -535,10 +522,10 @@ abstract class AbstractSearchAsyncAction exten if (logger.isTraceEnabled()) { logger.trace("got first-phase result from {}", result != null ? result.getSearchShardTarget() : null); } - results.consumeResult(result, () -> onShardResultConsumed(result, shardIt)); + results.consumeResult(result, () -> onShardResultConsumed(result)); } - private void onShardResultConsumed(Result result, SearchShardIterator shardIt) { + private void onShardResultConsumed(Result result) { successfulOps.incrementAndGet(); // clean a previous error on this shard group (note, this code will be serialized on the same shardIndex value level // so it's ok concurrency wise to miss potentially the shard failures being created because of another failure @@ -552,28 +539,14 @@ abstract class AbstractSearchAsyncAction exten // cause the successor to read a wrong value from successfulOps if second phase is very fast ie. count etc. // increment all the "future" shards to update the total ops since some may work and some may not...
// and when that happens, we break on total ops, so we must maintain them - successfulShardExecution(shardIt); + successfulShardExecution(); } - private void successfulShardExecution(SearchShardIterator shardsIt) { - final int remainingOpsOnIterator; - if (shardsIt.skip()) { - // It's possible that we're skipping a shard that's unavailable - // but its range was available in the IndexMetadata, in that - // case the shardsIt.remaining() would be 0, expectedTotalOps - // accounts for unavailable shards too. - remainingOpsOnIterator = Math.max(shardsIt.remaining(), 1); - } else { - remainingOpsOnIterator = shardsIt.remaining() + 1; - } - final int xTotalOps = totalOps.addAndGet(remainingOpsOnIterator); - if (xTotalOps == expectedTotalOps) { + private void successfulShardExecution() { + final int outstanding = outstandingShards.decrementAndGet(); + assert outstanding >= 0 : "outstanding: " + outstanding; + if (outstanding == 0) { onPhaseDone(); - } else if (xTotalOps > expectedTotalOps) { - throw new AssertionError( - "unexpected higher total ops [" + xTotalOps + "] compared to expected [" + expectedTotalOps + "]", - new SearchPhaseExecutionException(getName(), "Shard failures", null, buildShardFailures()) - ); } } @@ -640,7 +613,7 @@ abstract class AbstractSearchAsyncAction exten scrollId, getNumShards(), numSuccess, - skippedOps.get(), + toSkipShardsIts.size(), buildTookInMillis(), failures, clusters, diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java index d45a8a6f01cd..f7b258a9f6b7 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java @@ -12,7 +12,6 @@ package org.elasticsearch.action.search; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.FixedBitSet; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.CountDown; @@ -61,8 +60,8 @@ final class CanMatchPreFilterSearchPhase { private final Logger logger; private final SearchRequest request; - private final GroupShardsIterator shardsIts; - private final ActionListener> listener; + private final List shardsIts; + private final ActionListener> listener; private final TransportSearchAction.SearchTimeProvider timeProvider; private final BiFunction nodeIdToConnection; private final SearchTransportService searchTransportService; @@ -86,12 +85,12 @@ final class CanMatchPreFilterSearchPhase { Map concreteIndexBoosts, Executor executor, SearchRequest request, - GroupShardsIterator shardsIts, + List shardsIts, TransportSearchAction.SearchTimeProvider timeProvider, SearchTask task, boolean requireAtLeastOneMatch, CoordinatorRewriteContextProvider coordinatorRewriteContextProvider, - ActionListener> listener + ActionListener> listener ) { this.logger = logger; this.searchTransportService = searchTransportService; @@ -169,10 +168,9 @@ final class CanMatchPreFilterSearchPhase { if (matchedShardLevelRequests.isEmpty()) { finishPhase(); } else { - GroupShardsIterator matchingShards = new GroupShardsIterator<>(matchedShardLevelRequests); // verify missing shards only for the shards that we hit for the query - checkNoMissingShards(matchingShards); - new 
Round(matchingShards).run(); + checkNoMissingShards(matchedShardLevelRequests); + new Round(matchedShardLevelRequests).run(); } } @@ -202,12 +200,12 @@ final class CanMatchPreFilterSearchPhase { minAndMaxes[shardIndex] = minAndMax; } - private void checkNoMissingShards(GroupShardsIterator shards) { + private void checkNoMissingShards(List shards) { assert assertSearchCoordinationThread(); SearchPhase.doCheckNoMissingShards("can_match", request, shards, SearchPhase::makeMissingShardsError); } - private Map> groupByNode(GroupShardsIterator shards) { + private Map> groupByNode(List shards) { Map> requests = new HashMap<>(); for (int i = 0; i < shards.size(); i++) { final SearchShardIterator shardRoutings = shards.get(i); @@ -230,11 +228,11 @@ final class CanMatchPreFilterSearchPhase { * to retry on other available shard copies. */ class Round extends AbstractRunnable { - private final GroupShardsIterator shards; + private final List shards; private final CountDown countDown; private final AtomicReferenceArray failedResponses; - Round(GroupShardsIterator shards) { + Round(List shards) { this.shards = shards; this.countDown = new CountDown(shards.size()); this.failedResponses = new AtomicReferenceArray<>(shardsIts.size()); @@ -328,7 +326,7 @@ final class CanMatchPreFilterSearchPhase { finishPhase(); } else { // trigger another round, forcing execution - executor.execute(new Round(new GroupShardsIterator<>(remainingShards)) { + executor.execute(new Round(remainingShards) { @Override public boolean isForceExecution() { return true; @@ -419,7 +417,7 @@ final class CanMatchPreFilterSearchPhase { listener.onFailure(new SearchPhaseExecutionException("can_match", msg, cause, ShardSearchFailure.EMPTY_ARRAY)); } - private synchronized GroupShardsIterator getIterator(GroupShardsIterator shardsIts) { + private synchronized List getIterator(List shardsIts) { // TODO: pick the local shard when possible if (requireAtLeastOneMatch && numPossibleMatches == 0) { // this is a special case where we have no hit but we need to get at least one search response in order @@ -452,14 +450,10 @@ final class CanMatchPreFilterSearchPhase { return shardsIts; } FieldSortBuilder fieldSort = FieldSortBuilder.getPrimaryFieldSortOrNull(request.source()); - return new GroupShardsIterator<>(sortShards(shardsIts, minAndMaxes, fieldSort.order())); + return sortShards(shardsIts, minAndMaxes, fieldSort.order()); } - private static List sortShards( - GroupShardsIterator shardsIts, - MinAndMax[] minAndMaxes, - SortOrder order - ) { + private static List sortShards(List shardsIts, MinAndMax[] minAndMaxes, SortOrder order) { int bound = shardsIts.size(); List toSort = new ArrayList<>(bound); for (int i = 0; i < bound; i++) { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index 5c5c47b5fcc4..056806fbb0b0 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -20,7 +20,6 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.search.SearchPhaseResult; import 
org.elasticsearch.search.SearchShardTarget; @@ -56,7 +55,7 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction SearchPhaseResults queryPhaseResultConsumer, SearchRequest request, ActionListener listener, - GroupShardsIterator shardsIts, + List shardsIts, TransportSearchAction.SearchTimeProvider timeProvider, ClusterState clusterState, SearchTask task, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java index 702369dc3839..1308a2fb61cf 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java @@ -8,11 +8,11 @@ */ package org.elasticsearch.action.search; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.transport.Transport; +import java.util.List; import java.util.Objects; import java.util.function.Function; @@ -45,14 +45,14 @@ abstract class SearchPhase { + "]. Consider using `allow_partial_search_results` setting to bypass this error."; } - protected void doCheckNoMissingShards(String phaseName, SearchRequest request, GroupShardsIterator shardsIts) { + protected void doCheckNoMissingShards(String phaseName, SearchRequest request, List shardsIts) { doCheckNoMissingShards(phaseName, request, shardsIts, this::missingShardsErrorMessage); } protected static void doCheckNoMissingShards( String phaseName, SearchRequest request, - GroupShardsIterator shardsIts, + List shardsIts, Function makeErrorMessage ) { assert request.allowPartialSearchResults() != null : "SearchRequest missing setting for allowPartialSearchResults"; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java b/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java index a7f92700435a..6016a0c7a1eb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java @@ -13,7 +13,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.search.SearchResponse.Clusters; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -21,7 +20,6 @@ import org.elasticsearch.search.query.QuerySearchResult; import java.util.List; import java.util.Objects; -import java.util.stream.StreamSupport; /** * A listener that allows to track progress of the {@link TransportSearchAction}. 
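The SearchProgressListener hunk below shows why the StreamSupport import could be dropped: GroupShardsIterator was only an Iterable, which has no stream() method, whereas a List streams directly. An illustrative, self-contained comparison (the demo class and element types are mine, not from the codebase):

import java.util.List;
import java.util.stream.StreamSupport;

class StreamSourceDemo {
    // an Iterable requires the spliterator dance to obtain a Stream
    static List<String> fromIterable(Iterable<Integer> ids) {
        return StreamSupport.stream(ids.spliterator(), false).map(String::valueOf).toList();
    }

    // a List streams directly, which is all buildSearchShardsFromIter now needs
    static List<String> fromList(List<Integer> ids) {
        return ids.stream().map(String::valueOf).toList();
    }
}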
@@ -225,7 +223,7 @@ public abstract class SearchProgressListener { .toList(); } - static List buildSearchShards(GroupShardsIterator its) { - return StreamSupport.stream(its.spliterator(), false).map(e -> new SearchShard(e.getClusterAlias(), e.shardId())).toList(); + static List buildSearchShardsFromIter(List its) { + return its.stream().map(e -> new SearchShard(e.getClusterAlias(), e.shardId())).toList(); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index f75b84abc2f0..088a16deb76d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -14,7 +14,6 @@ import org.apache.lucene.search.TopFieldDocs; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; @@ -25,6 +24,7 @@ import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.transport.Transport; +import java.util.List; import java.util.Map; import java.util.concurrent.Executor; import java.util.function.BiFunction; @@ -52,7 +52,7 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction resultConsumer, SearchRequest request, ActionListener listener, - GroupShardsIterator shardsIts, + List shardsIts, TransportSearchAction.SearchTimeProvider timeProvider, ClusterState clusterState, SearchTask task, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java index 6c95a3c8fd43..b8d0a928e05a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -49,6 +48,7 @@ import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.List; import java.util.Map; import java.util.concurrent.Executor; import java.util.function.BiFunction; @@ -150,7 +150,7 @@ public class TransportOpenPointInTimeAction extends HandledTransportAction shardIterators, + List shardIterators, TransportSearchAction.SearchTimeProvider timeProvider, BiFunction connectionLookup, ClusterState clusterState, @@ -212,7 +212,7 @@ public class TransportOpenPointInTimeAction extends HandledTransportAction shardIterators, + List shardIterators, TransportSearchAction.SearchTimeProvider timeProvider, BiFunction connectionLookup, 
ClusterState clusterState, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 7278305be3e0..8e7333155d76 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -11,6 +11,7 @@ package org.elasticsearch.action.search; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; @@ -44,7 +45,6 @@ import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.project.ProjectResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; @@ -1293,7 +1293,7 @@ public class TransportSearchAction extends HandledTransportAction shardIterators = mergeShardsIterators(localShardIterators, remoteShardIterators); + final List shardIterators = mergeShardsIterators(localShardIterators, remoteShardIterators); failIfOverShardCountLimit(clusterService, shardIterators.size()); @@ -1427,7 +1427,7 @@ public class TransportSearchAction extends HandledTransportAction mergeShardsIterators( + static List mergeShardsIterators( List localShardIterators, List remoteShardIterators ) { @@ -1437,7 +1437,8 @@ public class TransportSearchAction extends HandledTransportAction shardIterators, + List shardIterators, SearchTimeProvider timeProvider, BiFunction connectionLookup, ClusterState clusterState, @@ -1469,7 +1470,7 @@ public class TransportSearchAction extends HandledTransportAction shardIterators, + List shardIterators, SearchTimeProvider timeProvider, BiFunction connectionLookup, ClusterState clusterState, @@ -1866,7 +1867,7 @@ public class TransportSearchAction extends HandledTransportAction shardRoutings = clusterService.operationRouting() + List shardRoutings = clusterService.operationRouting() .searchShards( projectState, concreteIndices, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java index 7bef727a1d51..000104925b8c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.search; +import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.RemoteClusterActionType; @@ -19,7 +20,6 @@ import org.elasticsearch.cluster.ProjectState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.project.ProjectResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.index.Index; import 
org.elasticsearch.index.query.Rewriteable; @@ -145,15 +145,14 @@ public class TransportSearchShardsAction extends HandledTransportAction shardIts = GroupShardsIterator.sortAndCreate( - transportSearchAction.getLocalShardsIterator( - project, - searchRequest, - searchShardsRequest.clusterAlias(), - indicesAndAliases, - concreteIndexNames - ) + List shardIts = transportSearchAction.getLocalShardsIterator( + project, + searchRequest, + searchShardsRequest.clusterAlias(), + indicesAndAliases, + concreteIndexNames ); + CollectionUtil.timSort(shardIts); if (SearchService.canRewriteToMatchNone(searchRequest.source()) == false) { delegate.onResponse( new SearchShardsResponse(toGroups(shardIts), project.cluster().nodes().getAllNodes(), aliasFilters) @@ -179,7 +178,7 @@ public class TransportSearchShardsAction extends HandledTransportAction toGroups(GroupShardsIterator shardIts) { + private static List toGroups(List shardIts) { List groups = new ArrayList<>(shardIts.size()); for (SearchShardIterator shardIt : shardIts) { boolean skip = shardIt.skip(); diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index e9c00000c7c8..aed167b44911 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -36,6 +35,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.Transports; import java.io.IOException; +import java.util.List; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReferenceArray; @@ -110,7 +110,7 @@ public abstract class TransportBroadcastAction< * on the first shard in it. If the operation fails, it will be retried on the next shard in the iterator. 
*/ @FixForMultiProject // add ProjectMetadata to this method - protected abstract GroupShardsIterator shards(ClusterState clusterState, Request request, String[] concreteIndices); + protected abstract List shards(ClusterState clusterState, Request request, String[] concreteIndices); protected abstract ClusterBlockException checkGlobalBlock(ClusterState state, Request request); @@ -123,7 +123,7 @@ public abstract class TransportBroadcastAction< final ActionListener listener; final ClusterState clusterState; final DiscoveryNodes nodes; - final GroupShardsIterator shardsIts; + final List shardsIts; final int expectedOps; final AtomicInteger counterOps = new AtomicInteger(); // ShardResponse or Exception diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java index d51613b7dccc..03e6077f7704 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ProjectState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.project.ProjectResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; @@ -67,13 +66,8 @@ public class TransportTermVectorsAction extends TransportSingleShardAction groupShardsIter = operationRouting.searchShards( - project, - new String[] { request.concreteIndex() }, - null, - request.request().preference() - ); - return groupShardsIter.iterator().next(); + return operationRouting.searchShards(project, new String[] { request.concreteIndex() }, null, request.request().preference()) + .getFirst(); } return operationRouting.useOnlyPromotableShardsForStateless( diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapUtil.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapUtil.java index 8f1537d917c1..f4363ce1948b 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapUtil.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapUtil.java @@ -24,7 +24,7 @@ public class BootstrapUtil { public static SecureSettings loadSecureSettings(Environment initialEnv, SecureString keystorePassword) throws BootstrapException { try { - return KeyStoreWrapper.bootstrap(initialEnv.configFile(), () -> keystorePassword); + return KeyStoreWrapper.bootstrap(initialEnv.configDir(), () -> keystorePassword); } catch (Exception e) { throw new BootstrapException(e); } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/ConsoleLoader.java b/server/src/main/java/org/elasticsearch/bootstrap/ConsoleLoader.java index 7b85b369b5dd..54244f320840 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/ConsoleLoader.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/ConsoleLoader.java @@ -52,7 +52,7 @@ public class ConsoleLoader { } private static ClassLoader buildClassLoader(Environment env) { - final Path libDir = env.libFile().resolve("tools").resolve("ansi-console"); + final Path libDir = env.libDir().resolve("tools").resolve("ansi-console"); try (var libDirFilesStream = Files.list(libDir)) { final URL[] 
urls = libDirFilesStream.filter(each -> each.getFileName().toString().endsWith(".jar")) diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 9256a3029895..ba978f09dfef 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -187,7 +187,7 @@ class Elasticsearch { nodeEnv.validateNativesConfig(); // temporary directories are important for JNA initializeNatives( - nodeEnv.tmpFile(), + nodeEnv.tmpDir(), BootstrapSettings.MEMORY_LOCK_SETTING.get(args.nodeSettings()), true, // always install system call filters, not user-configurable since 8.0.0 BootstrapSettings.CTRLHANDLER_SETTING.get(args.nodeSettings()) @@ -223,8 +223,8 @@ class Elasticsearch { ); // load the plugin Java modules and layers now for use in entitlements - var modulesBundles = PluginsLoader.loadModulesBundles(nodeEnv.modulesFile()); - var pluginsBundles = PluginsLoader.loadPluginsBundles(nodeEnv.pluginsFile()); + var modulesBundles = PluginsLoader.loadModulesBundles(nodeEnv.modulesDir()); + var pluginsBundles = PluginsLoader.loadPluginsBundles(nodeEnv.pluginsDir()); final PluginsLoader pluginsLoader; @@ -245,9 +245,9 @@ class Elasticsearch { EntitlementBootstrap.bootstrap( pluginPolicies, pluginsResolver::resolveClassToPluginName, - nodeEnv.dataFiles(), - nodeEnv.configFile(), - nodeEnv.tmpFile() + nodeEnv.dataDirs(), + nodeEnv.configDir(), + nodeEnv.tmpDir() ); } else if (RuntimeVersionFeature.isSecurityManagerAvailable()) { // no need to explicitly enable native access for legacy code diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Security.java b/server/src/main/java/org/elasticsearch/bootstrap/Security.java index dc6de9a6b2c9..a352112b67af 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -178,11 +178,11 @@ final class Security { } }; - for (Path plugin : PluginsUtils.findPluginDirs(environment.pluginsFile())) { - addPolicy.accept(PolicyUtil.getPluginPolicyInfo(plugin, environment.tmpFile())); + for (Path plugin : PluginsUtils.findPluginDirs(environment.pluginsDir())) { + addPolicy.accept(PolicyUtil.getPluginPolicyInfo(plugin, environment.tmpDir())); } - for (Path plugin : PluginsUtils.findPluginDirs(environment.modulesFile())) { - addPolicy.accept(PolicyUtil.getModulePolicyInfo(plugin, environment.tmpFile())); + for (Path plugin : PluginsUtils.findPluginDirs(environment.modulesDir())) { + addPolicy.accept(PolicyUtil.getModulePolicyInfo(plugin, environment.tmpDir())); } return Collections.unmodifiableMap(map); @@ -199,7 +199,7 @@ final class Security { private static List createRecursiveDataPathPermission(Environment environment) throws IOException { Permissions policy = new Permissions(); - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { addDirectoryPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete", true); } return toFilePermissions(policy); @@ -215,13 +215,13 @@ final class Security { Map> securedSettingKeys = new HashMap<>(); for (URL url : mainCodebases) { - for (Permission p : PolicyUtil.getPolicyPermissions(url, template, environment.tmpFile())) { + for (Permission p : PolicyUtil.getPolicyPermissions(url, template, environment.tmpDir())) { readSecuredConfigFilePermissions(environment, url, p, securedConfigFiles, 
securedSettingKeys); } } for (var pp : pluginPolicies.entrySet()) { - for (Permission p : PolicyUtil.getPolicyPermissions(pp.getKey(), pp.getValue(), environment.tmpFile())) { + for (Permission p : PolicyUtil.getPolicyPermissions(pp.getKey(), pp.getValue(), environment.tmpDir())) { readSecuredConfigFilePermissions(environment, pp.getKey(), p, securedConfigFiles, securedSettingKeys); } } @@ -242,8 +242,8 @@ final class Security { // If the setting shouldn't be an HTTPS URL, that'll be caught by that setting's validation later in the process. // HTTP (no S) URLs are not supported. if (settingValue.toLowerCase(Locale.ROOT).startsWith("https://") == false) { - Path file = environment.configFile().resolve(settingValue); - if (file.startsWith(environment.configFile()) == false) { + Path file = environment.configDir().resolve(settingValue); + if (file.startsWith(environment.configDir()) == false) { throw new IllegalStateException( ps.getValue() + " tried to grant access to file outside config directory " + file ); @@ -263,9 +263,9 @@ final class Security { // always add some config files as exclusive files that no one can access // there's no reason for anyone to read these once the security manager is initialized // so if something has tried to grant itself access, crash out with an error - addSpeciallySecuredConfigFile(securedConfigFiles, environment.configFile().resolve("elasticsearch.yml").toString()); - addSpeciallySecuredConfigFile(securedConfigFiles, environment.configFile().resolve("jvm.options").toString()); - addSpeciallySecuredConfigFile(securedConfigFiles, environment.configFile().resolve("jvm.options.d/-").toString()); + addSpeciallySecuredConfigFile(securedConfigFiles, environment.configDir().resolve("elasticsearch.yml").toString()); + addSpeciallySecuredConfigFile(securedConfigFiles, environment.configDir().resolve("jvm.options").toString()); + addSpeciallySecuredConfigFile(securedConfigFiles, environment.configDir().resolve("jvm.options.d/-").toString()); return Collections.unmodifiableMap(securedConfigFiles); } @@ -279,8 +279,8 @@ final class Security { ) { String securedFileName = extractSecuredName(p, SecuredConfigFileAccessPermission.class); if (securedFileName != null) { - Path securedFile = environment.configFile().resolve(securedFileName); - if (securedFile.startsWith(environment.configFile()) == false) { + Path securedFile = environment.configDir().resolve(securedFileName); + if (securedFile.startsWith(environment.configDir()) == false) { throw new IllegalStateException("[" + url + "] tried to grant access to file outside config directory " + securedFile); } logger.debug("Jar {} securing access to config file {}", url, securedFile); @@ -336,26 +336,26 @@ final class Security { */ static void addFilePermissions(Permissions policy, Environment environment, Path pidFile) throws IOException { // read-only dirs - addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.binFile(), "read,readlink", false); - addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.libFile(), "read,readlink", false); - addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.modulesFile(), "read,readlink", false); - addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.pluginsFile(), "read,readlink", false); - addDirectoryPath(policy, "path.conf", environment.configFile(), "read,readlink", false); + addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.binDir(), "read,readlink", false); + 
addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.libDir(), "read,readlink", false); + addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.modulesDir(), "read,readlink", false); + addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.pluginsDir(), "read,readlink", false); + addDirectoryPath(policy, "path.conf", environment.configDir(), "read,readlink", false); // read-write dirs - addDirectoryPath(policy, "java.io.tmpdir", environment.tmpFile(), "read,readlink,write,delete", false); - addDirectoryPath(policy, Environment.PATH_LOGS_SETTING.getKey(), environment.logsFile(), "read,readlink,write,delete", false); - if (environment.sharedDataFile() != null) { + addDirectoryPath(policy, "java.io.tmpdir", environment.tmpDir(), "read,readlink,write,delete", false); + addDirectoryPath(policy, Environment.PATH_LOGS_SETTING.getKey(), environment.logsDir(), "read,readlink,write,delete", false); + if (environment.sharedDataDir() != null) { addDirectoryPath( policy, Environment.PATH_SHARED_DATA_SETTING.getKey(), - environment.sharedDataFile(), + environment.sharedDataDir(), "read,readlink,write,delete", false ); } final Set dataFilesPaths = new HashSet<>(); - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { addDirectoryPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete", false); /* * We have to do this after adding the path because a side effect of that is that the directory is created; the Path#toRealPath @@ -371,7 +371,7 @@ final class Security { throw new IllegalStateException("unable to access [" + path + "]", e); } } - for (Path path : environment.repoFiles()) { + for (Path path : environment.repoDirs()) { addDirectoryPath(policy, Environment.PATH_REPO_SETTING.getKey(), path, "read,readlink,write,delete", false); } @@ -380,7 +380,7 @@ final class Security { addSingleFilePath(policy, pidFile, "delete"); } // we need to touch the operator/settings.json file when restoring from snapshots, on some OSs it needs file write permission - addSingleFilePath(policy, environment.configFile().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME), "read,readlink,write"); + addSingleFilePath(policy, environment.configDir().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME), "read,readlink,write"); } /** diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java index 6a4296d9b047..35284cebf22a 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java @@ -69,14 +69,14 @@ final class Spawner implements Closeable { if (spawned.compareAndSet(false, true) == false) { throw new IllegalStateException("native controllers already spawned"); } - if (Files.exists(environment.modulesFile()) == false) { - throw new IllegalStateException("modules directory [" + environment.modulesFile() + "] not found"); + if (Files.exists(environment.modulesDir()) == false) { + throw new IllegalStateException("modules directory [" + environment.modulesDir() + "] not found"); } /* * For each module, attempt to spawn the controller daemon. Silently ignore any module that doesn't include a controller for the * correct platform. 
*/ - List paths = PluginsUtils.findPluginDirs(environment.modulesFile()); + List paths = PluginsUtils.findPluginDirs(environment.modulesDir()); for (final Path modules : paths) { final PluginDescriptor info = PluginDescriptor.readFromProperties(modules); final Path spawnPath = Platforms.nativeControllerPath(modules); @@ -91,7 +91,7 @@ final class Spawner implements Closeable { ); throw new IllegalArgumentException(message); } - final Process process = spawnNativeController(spawnPath, environment.tmpFile()); + final Process process = spawnNativeController(spawnPath, environment.tmpDir()); // The process _shouldn't_ write any output via its stdout or stderr, but if it does then // it will block if nothing is reading that output. To avoid this we can pipe the // outputs and create pump threads to write any messages there to the ES log. diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java index 8366083b1907..b0d1758d895e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java @@ -11,10 +11,8 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -22,7 +20,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.Processors; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -36,7 +33,6 @@ import java.util.Locale; import java.util.Objects; import java.util.Set; import java.util.TreeSet; -import java.util.regex.Pattern; import static java.lang.String.format; import static org.elasticsearch.node.Node.NODE_EXTERNAL_ID_SETTING; @@ -52,8 +48,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl private static final ParseField PROCESSORS_RANGE_FIELD = new ParseField("processors_range"); private static final ParseField MEMORY_FIELD = new ParseField("memory"); private static final ParseField STORAGE_FIELD = new ParseField("storage"); - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // Remove deprecated field - private static final ParseField VERSION_FIELD = new ParseField("node_version"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "desired_node", @@ -63,8 +57,7 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl (Processors) args[1], (ProcessorsRange) args[2], (ByteSizeValue) args[3], - (ByteSizeValue) args[4], - (String) args[5] + (ByteSizeValue) args[4] ) ); @@ -98,12 +91,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl STORAGE_FIELD, ObjectParser.ValueType.STRING ); - parser.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> p.text(), - VERSION_FIELD, - ObjectParser.ValueType.STRING - ); } private final 
Settings settings; @@ -112,21 +99,9 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl private final ByteSizeValue memory; private final ByteSizeValue storage; - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // Remove deprecated version field - private final String version; private final String externalId; private final Set roles; - @Deprecated - public DesiredNode(Settings settings, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage, String version) { - this(settings, null, processorsRange, memory, storage, version); - } - - @Deprecated - public DesiredNode(Settings settings, double processors, ByteSizeValue memory, ByteSizeValue storage, String version) { - this(settings, Processors.of(processors), null, memory, storage, version); - } - public DesiredNode(Settings settings, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage) { this(settings, null, processorsRange, memory, storage); } @@ -136,17 +111,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl } DesiredNode(Settings settings, Processors processors, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage) { - this(settings, processors, processorsRange, memory, storage, null); - } - - DesiredNode( - Settings settings, - Processors processors, - ProcessorsRange processorsRange, - ByteSizeValue memory, - ByteSizeValue storage, - @Deprecated String version - ) { assert settings != null; assert memory != null; assert storage != null; @@ -180,7 +144,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl this.processorsRange = processorsRange; this.memory = memory; this.storage = storage; - this.version = version; this.externalId = NODE_EXTERNAL_ID_SETTING.get(settings); this.roles = Collections.unmodifiableSortedSet(new TreeSet<>(DiscoveryNode.getRolesFromSettings(settings))); } @@ -198,25 +161,10 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl } final var memory = ByteSizeValue.readFrom(in); final var storage = ByteSizeValue.readFrom(in); - final String version; - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { - version = in.readOptionalString(); - } else { - version = Version.readVersion(in).toString(); + if (in.getTransportVersion().before(TransportVersions.REMOVE_DESIRED_NODE_VERSION)) { + in.readOptionalString(); } - return new DesiredNode(settings, processors, processorsRange, memory, storage, version); - } - - private static final Pattern SEMANTIC_VERSION_PATTERN = Pattern.compile("^(\\d+\\.\\d+\\.\\d+)\\D?.*"); - - private static Version parseLegacyVersion(String version) { - if (version != null) { - var semanticVersionMatcher = SEMANTIC_VERSION_PATTERN.matcher(version); - if (semanticVersionMatcher.matches()) { - return Version.fromString(semanticVersionMatcher.group(1)); - } - } - return null; + return new DesiredNode(settings, processors, processorsRange, memory, storage); } @Override @@ -232,16 +180,8 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl } memory.writeTo(out); storage.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { - out.writeOptionalString(version); - } else { - Version parsedVersion = parseLegacyVersion(version); - if (version == null) { - // Some node is from before we made the version field not required. If so, fill in with the current node version. 
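The serialization change that continues below follows the usual transport-version idiom for a removed field: newer nodes still write a null optional string to peers on older transport versions, and (in the readFrom hunk above) read and discard the field arriving from such peers. A hedged sketch of that idiom using plain java.io streams in place of Elasticsearch's StreamInput/StreamOutput; the version constant here is a stand-in, not the real TransportVersions value:

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

class RemovedFieldWireSketch {
    static final int REMOVE_DESIRED_NODE_VERSION = 42; // stand-in for the TransportVersions constant

    // newer node writing to an older peer: emit an "absent optional value" marker
    static void writeTo(DataOutputStream out, int peerTransportVersion) throws IOException {
        if (peerTransportVersion < REMOVE_DESIRED_NODE_VERSION) {
            out.writeBoolean(false); // optional string "not present"
        }
    }

    // newer node reading from an older peer: consume and discard the legacy field
    static void readFrom(DataInputStream in, int peerTransportVersion) throws IOException {
        if (peerTransportVersion < REMOVE_DESIRED_NODE_VERSION && in.readBoolean()) {
            in.readUTF(); // legacy node_version value, ignored
        }
    }
}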
- Version.writeVersion(Version.CURRENT, out); - } else { - Version.writeVersion(parsedVersion, out); - } + if (out.getTransportVersion().before(TransportVersions.REMOVE_DESIRED_NODE_VERSION)) { + out.writeOptionalString(null); } } @@ -269,14 +209,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl } builder.field(MEMORY_FIELD.getPreferredName(), memory); builder.field(STORAGE_FIELD.getPreferredName(), storage); - addDeprecatedVersionField(builder); - } - - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // Remove deprecated field from response - private void addDeprecatedVersionField(XContentBuilder builder) throws IOException { - if (version != null) { - builder.field(VERSION_FIELD.getPreferredName(), version); - } } public boolean hasMasterRole() { @@ -356,7 +288,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl return Objects.equals(settings, that.settings) && Objects.equals(memory, that.memory) && Objects.equals(storage, that.storage) - && Objects.equals(version, that.version) && Objects.equals(externalId, that.externalId) && Objects.equals(roles, that.roles); } @@ -369,7 +300,7 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl @Override public int hashCode() { - return Objects.hash(settings, processors, processorsRange, memory, storage, version, externalId, roles); + return Objects.hash(settings, processors, processorsRange, memory, storage, externalId, roles); } @Override @@ -398,10 +329,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl + '}'; } - public boolean hasVersion() { - return Strings.isNullOrBlank(version) == false; - } - public record ProcessorsRange(Processors min, @Nullable Processors max) implements Writeable, ToXContentObject { private static final ParseField MIN_FIELD = new ParseField("min"); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java index 7b89406be9aa..606309adf205 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java @@ -44,13 +44,12 @@ public record DesiredNodeWithStatus(DesiredNode desiredNode, Status status) (Processors) args[1], (DesiredNode.ProcessorsRange) args[2], (ByteSizeValue) args[3], - (ByteSizeValue) args[4], - (String) args[5] + (ByteSizeValue) args[4] ), // An unknown status is expected during upgrades to versions >= STATUS_TRACKING_SUPPORT_VERSION // the desired node status would be populated when a node in the newer version is elected as // master, the desired nodes status update happens in NodeJoinExecutor. - args[6] == null ? Status.PENDING : (Status) args[6] + args[5] == null ? 
Status.PENDING : (Status) args[5] ) ); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index bc44dbfadbfd..6355fdc8387f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -1531,7 +1531,7 @@ public class MetadataCreateIndexService { } List getIndexSettingsValidationErrors(final Settings settings, final boolean forbidPrivateIndexSettings) { - List validationErrors = validateIndexCustomPath(settings, env.sharedDataFile()); + List validationErrors = validateIndexCustomPath(settings, env.sharedDataDir()); if (forbidPrivateIndexSettings) { validationErrors.addAll(validatePrivateSettingsNotExplicitlySet(settings, indexScopedSettings)); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java b/server/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java deleted file mode 100644 index 32f9530e4b18..000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.routing; - -import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.common.util.Countable; - -import java.util.Iterator; -import java.util.List; - -/** - * This class implements a compilation of {@link ShardIterator}s. Each {@link ShardIterator} - * iterated by this {@link Iterable} represents a group of shards. - * ShardsIterators are always returned in ascending order independently of their order at construction - * time. The incoming iterators are sorted to ensure consistent iteration behavior across Nodes / JVMs. -*/ -public final class GroupShardsIterator & Countable> implements Iterable { - - private final List iterators; - - /** - * Constructs a new sorted GroupShardsIterator from the given list. Items are sorted based on their natural ordering. - * @see PlainShardIterator#compareTo(ShardIterator) - */ - public static & Countable> GroupShardsIterator sortAndCreate(List iterators) { - CollectionUtil.timSort(iterators); - return new GroupShardsIterator<>(iterators); - } - - /** - * Constructs a new GroupShardsIterator from the given list. 
- */ - public GroupShardsIterator(List iterators) { - this.iterators = iterators; - } - - /** - * Returns the total number of shards within all groups - * @return total number of shards - */ - public int totalSize() { - return iterators.stream().mapToInt(Countable::size).sum(); - } - - /** - * Returns the total number of shards plus the number of empty groups - * @return number of shards and empty groups - */ - public int totalSizeWith1ForEmpty() { - int size = 0; - for (ShardIt shard : iterators) { - size += Math.max(1, shard.size()); - } - return size; - } - - /** - * Return the number of groups - * @return number of groups - */ - public int size() { - return iterators.size(); - } - - @Override - public Iterator iterator() { - return iterators.iterator(); - } - - public ShardIt get(int index) { - return iterators.get(index); - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index 8a26e05b1a14..e824793d2636 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing; +import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.ProjectState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.ProjectMetadata; @@ -28,6 +29,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; @@ -91,7 +93,7 @@ public class OperationRouting { } } - public GroupShardsIterator searchShards( + public List searchShards( ProjectState projectState, String[] concreteIndices, @Nullable Map> routing, @@ -100,7 +102,7 @@ public class OperationRouting { return searchShards(projectState, concreteIndices, routing, preference, null, null); } - public GroupShardsIterator searchShards( + public List searchShards( ProjectState projectState, String[] concreteIndices, @Nullable Map> routing, @@ -124,7 +126,9 @@ public class OperationRouting { set.add(PlainShardIterator.allSearchableShards(iterator)); } } - return GroupShardsIterator.sortAndCreate(new ArrayList<>(set)); + var res = new ArrayList<>(set); + CollectionUtil.timSort(res); + return res; } public static ShardIterator getShards(RoutingTable routingTable, ShardId shardId) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index 557a6f7cd4ca..94dc08fbea2b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing; +import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diffable; @@ -195,7 +196,7 @@ public class RoutingTable implements Iterable, Diffable allActiveShardsGrouped(String[] indices, boolean includeEmpty) { + public List allActiveShardsGrouped(String[] indices, boolean includeEmpty) { return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, ShardRouting::active); } @@ -204,11 +205,11 @@ public class RoutingTable implements Iterable, Diffable 
allAssignedShardsGrouped(String[] indices, boolean includeEmpty) { + public List allAssignedShardsGrouped(String[] indices, boolean includeEmpty) { return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, ShardRouting::assignedToNode); } - private GroupShardsIterator allSatisfyingPredicateShardsGrouped( + private List allSatisfyingPredicateShardsGrouped( String[] indices, boolean includeEmpty, Predicate predicate @@ -233,7 +234,8 @@ public class RoutingTable implements Iterable, Diffable, Diffable activePrimaryShardsGrouped(String[] indices, boolean includeEmpty) { + public List activePrimaryShardsGrouped(String[] indices, boolean includeEmpty) { // use list here since we need to maintain identity across shards ArrayList set = new ArrayList<>(); for (String index : indices) { @@ -304,7 +306,8 @@ public class RoutingTable implements Iterable, Diffable implements ToXContentObject { return new Setting<>(key, Boolean.toString(defaultValue), booleanParser(key, properties), validator, properties); } + public static Setting boolSetting( + String key, + Function defaultValueFn, + Validator validator, + Property... properties + ) { + return new Setting<>(key, defaultValueFn, booleanParser(key, properties), validator, properties); + } + public static Setting boolSetting(String key, Function defaultValueFn, Property... properties) { return new Setting<>(key, defaultValueFn, booleanParser(key, properties), properties); } diff --git a/server/src/main/java/org/elasticsearch/env/Environment.java b/server/src/main/java/org/elasticsearch/env/Environment.java index f9594655719f..813da761159c 100644 --- a/server/src/main/java/org/elasticsearch/env/Environment.java +++ b/server/src/main/java/org/elasticsearch/env/Environment.java @@ -46,28 +46,28 @@ public class Environment { private final Settings settings; - private final Path[] dataFiles; + private final Path[] dataDirs; - private final Path[] repoFiles; + private final Path[] repoDirs; - private final Path configFile; + private final Path configDir; - private final Path pluginsFile; + private final Path pluginsDir; - private final Path modulesFile; + private final Path modulesDir; - private final Path sharedDataFile; + private final Path sharedDataDir; /** location of bin/, used by plugin manager */ - private final Path binFile; + private final Path binDir; /** location of lib/, */ - private final Path libFile; + private final Path libDir; - private final Path logsFile; + private final Path logsDir; /** Path to the temporary file directory used by the JDK */ - private final Path tmpFile; + private final Path tmpDir; public Environment(final Settings settings, final Path configPath) { this(settings, configPath, PathUtils.get(System.getProperty("java.io.tmpdir"))); @@ -83,67 +83,67 @@ public class Environment { } if (configPath != null) { - configFile = configPath.toAbsolutePath().normalize(); + configDir = configPath.toAbsolutePath().normalize(); } else { - configFile = homeFile.resolve("config"); + configDir = homeFile.resolve("config"); } - tmpFile = Objects.requireNonNull(tmpPath); + tmpDir = Objects.requireNonNull(tmpPath); - pluginsFile = homeFile.resolve("plugins"); + pluginsDir = homeFile.resolve("plugins"); List dataPaths = PATH_DATA_SETTING.get(settings); if (dataPaths.isEmpty() == false) { - dataFiles = new Path[dataPaths.size()]; + dataDirs = new Path[dataPaths.size()]; for (int i = 0; i < dataPaths.size(); i++) { - dataFiles[i] = PathUtils.get(dataPaths.get(i)).toAbsolutePath().normalize(); + dataDirs[i] = 
PathUtils.get(dataPaths.get(i)).toAbsolutePath().normalize(); } } else { - dataFiles = new Path[] { homeFile.resolve("data") }; + dataDirs = new Path[] { homeFile.resolve("data") }; } if (PATH_SHARED_DATA_SETTING.exists(settings)) { - sharedDataFile = PathUtils.get(PATH_SHARED_DATA_SETTING.get(settings)).toAbsolutePath().normalize(); + sharedDataDir = PathUtils.get(PATH_SHARED_DATA_SETTING.get(settings)).toAbsolutePath().normalize(); } else { - sharedDataFile = null; + sharedDataDir = null; } List repoPaths = PATH_REPO_SETTING.get(settings); if (repoPaths.isEmpty()) { - repoFiles = EMPTY_PATH_ARRAY; + repoDirs = EMPTY_PATH_ARRAY; } else { - repoFiles = new Path[repoPaths.size()]; + repoDirs = new Path[repoPaths.size()]; for (int i = 0; i < repoPaths.size(); i++) { - repoFiles[i] = PathUtils.get(repoPaths.get(i)).toAbsolutePath().normalize(); + repoDirs[i] = PathUtils.get(repoPaths.get(i)).toAbsolutePath().normalize(); } } // this is trappy, Setting#get(Settings) will get a fallback setting yet return false for Settings#exists(Settings) if (PATH_LOGS_SETTING.exists(settings)) { - logsFile = PathUtils.get(PATH_LOGS_SETTING.get(settings)).toAbsolutePath().normalize(); + logsDir = PathUtils.get(PATH_LOGS_SETTING.get(settings)).toAbsolutePath().normalize(); } else { - logsFile = homeFile.resolve("logs"); + logsDir = homeFile.resolve("logs"); } - binFile = homeFile.resolve("bin"); - libFile = homeFile.resolve("lib"); - modulesFile = homeFile.resolve("modules"); + binDir = homeFile.resolve("bin"); + libDir = homeFile.resolve("lib"); + modulesDir = homeFile.resolve("modules"); final Settings.Builder finalSettings = Settings.builder().put(settings); if (PATH_DATA_SETTING.exists(settings)) { if (dataPathUsesList(settings)) { - finalSettings.putList(PATH_DATA_SETTING.getKey(), Arrays.stream(dataFiles).map(Path::toString).toList()); + finalSettings.putList(PATH_DATA_SETTING.getKey(), Arrays.stream(dataDirs).map(Path::toString).toList()); } else { - assert dataFiles.length == 1; - finalSettings.put(PATH_DATA_SETTING.getKey(), dataFiles[0]); + assert dataDirs.length == 1; + finalSettings.put(PATH_DATA_SETTING.getKey(), dataDirs[0]); } } finalSettings.put(PATH_HOME_SETTING.getKey(), homeFile); - finalSettings.put(PATH_LOGS_SETTING.getKey(), logsFile.toString()); + finalSettings.put(PATH_LOGS_SETTING.getKey(), logsDir.toString()); if (PATH_REPO_SETTING.exists(settings)) { - finalSettings.putList(Environment.PATH_REPO_SETTING.getKey(), Arrays.stream(repoFiles).map(Path::toString).toList()); + finalSettings.putList(Environment.PATH_REPO_SETTING.getKey(), Arrays.stream(repoDirs).map(Path::toString).toList()); } if (PATH_SHARED_DATA_SETTING.exists(settings)) { - assert sharedDataFile != null; - finalSettings.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataFile.toString()); + assert sharedDataDir != null; + finalSettings.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataDir.toString()); } this.settings = finalSettings.build(); @@ -159,22 +159,22 @@ public class Environment { /** * The data location. */ - public Path[] dataFiles() { - return dataFiles; + public Path[] dataDirs() { + return dataDirs; } /** * The shared data location */ - public Path sharedDataFile() { - return sharedDataFile; + public Path sharedDataDir() { + return sharedDataDir; } /** * The shared filesystem repo locations. 
*/ - public Path[] repoFiles() { - return repoFiles; + public Path[] repoDirs() { + return repoDirs; } /** @@ -182,8 +182,8 @@ public class Environment { * * If the specified location doesn't match any of the roots, returns null. */ - public Path resolveRepoFile(String location) { - return PathUtils.get(repoFiles, location); + public Path resolveRepoDir(String location) { + return PathUtils.get(repoDirs, location); } /** @@ -197,7 +197,7 @@ public class Environment { if ("file".equalsIgnoreCase(url.getProtocol())) { if (url.getHost() == null || "".equals(url.getHost())) { // only local file urls are supported - Path path = PathUtils.get(repoFiles, url.toURI()); + Path path = PathUtils.get(repoDirs, url.toURI()); if (path == null) { // Couldn't resolve against known repo locations return null; @@ -232,49 +232,48 @@ public class Environment { } } - // TODO: rename all these "file" methods to "dir" /** * The config directory. */ - public Path configFile() { - return configFile; + public Path configDir() { + return configDir; } - public Path pluginsFile() { - return pluginsFile; + public Path pluginsDir() { + return pluginsDir; } - public Path binFile() { - return binFile; + public Path binDir() { + return binDir; } - public Path libFile() { - return libFile; + public Path libDir() { + return libDir; } - public Path modulesFile() { - return modulesFile; + public Path modulesDir() { + return modulesDir; } - public Path logsFile() { - return logsFile; + public Path logsDir() { + return logsDir; } /** Path to the default temp directory used by the JDK */ - public Path tmpFile() { - return tmpFile; + public Path tmpDir() { + return tmpDir; } /** Ensure the configured temp directory is a valid directory */ - public void validateTmpFile() throws IOException { - validateTemporaryDirectory("Temporary directory", tmpFile); + public void validateTmpDir() throws IOException { + validateTemporaryDirectory("Temporary directory", tmpDir); } /** * Ensure the temp directories needed for JNA are set up correctly. 
*/ public void validateNativesConfig() throws IOException { - validateTmpFile(); + validateTmpDir(); if (Constants.LINUX) { validateTemporaryDirectory(LIBFFI_TMPDIR_ENVIRONMENT_VARIABLE + " environment variable", getLibffiTemporaryDirectory()); } @@ -335,15 +334,15 @@ public class Environment { * object which may contain different setting) */ public static void assertEquivalent(Environment actual, Environment expected) { - assertEquals(actual.dataFiles(), expected.dataFiles(), "dataFiles"); - assertEquals(actual.repoFiles(), expected.repoFiles(), "repoFiles"); - assertEquals(actual.configFile(), expected.configFile(), "configFile"); - assertEquals(actual.pluginsFile(), expected.pluginsFile(), "pluginsFile"); - assertEquals(actual.binFile(), expected.binFile(), "binFile"); - assertEquals(actual.libFile(), expected.libFile(), "libFile"); - assertEquals(actual.modulesFile(), expected.modulesFile(), "modulesFile"); - assertEquals(actual.logsFile(), expected.logsFile(), "logsFile"); - assertEquals(actual.tmpFile(), expected.tmpFile(), "tmpFile"); + assertEquals(actual.dataDirs(), expected.dataDirs(), "dataDirs"); + assertEquals(actual.repoDirs(), expected.repoDirs(), "repoDirs"); + assertEquals(actual.configDir(), expected.configDir(), "configDir"); + assertEquals(actual.pluginsDir(), expected.pluginsDir(), "pluginsDir"); + assertEquals(actual.binDir(), expected.binDir(), "binDir"); + assertEquals(actual.libDir(), expected.libDir(), "libDir"); + assertEquals(actual.modulesDir(), expected.modulesDir(), "modulesDir"); + assertEquals(actual.logsDir(), expected.logsDir(), "logsDir"); + assertEquals(actual.tmpDir(), expected.tmpDir(), "tmpDir"); } private static void assertEquals(Object actual, Object expected, String name) { diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 90e2ae5c6270..febde6b6a69a 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -215,10 +215,10 @@ public final class NodeEnvironment implements Closeable { final CheckedFunction pathFunction, final Function subPathMapping ) throws IOException { - dataPaths = new DataPath[environment.dataFiles().length]; + dataPaths = new DataPath[environment.dataDirs().length]; locks = new Lock[dataPaths.length]; try { - final Path[] dataPaths = environment.dataFiles(); + final Path[] dataPaths = environment.dataDirs(); for (int dirIndex = 0; dirIndex < dataPaths.length; dirIndex++) { Path dataDir = dataPaths[dirIndex]; Path dir = subPathMapping.apply(dataDir); @@ -267,9 +267,9 @@ public final class NodeEnvironment implements Closeable { boolean success = false; try { - sharedDataPath = environment.sharedDataFile(); + sharedDataPath = environment.sharedDataDir(); - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { if (Files.exists(path)) { // Call to toRealPath required to resolve symlinks. 
// We let it fall through to create directories to ensure the symlink @@ -287,7 +287,7 @@ public final class NodeEnvironment implements Closeable { Locale.ROOT, "failed to obtain node locks, tried %s;" + " maybe these locations are not writable or multiple nodes were started on the same data path?", - Arrays.toString(environment.dataFiles()) + Arrays.toString(environment.dataDirs()) ); throw new IllegalStateException(message, e); } @@ -310,7 +310,7 @@ public final class NodeEnvironment implements Closeable { } // versions 7.x and earlier put their data under ${path.data}/nodes/; leave a file at that location to prevent downgrades - for (Path dataPath : environment.dataFiles()) { + for (Path dataPath : environment.dataDirs()) { final Path legacyNodesPath = dataPath.resolve("nodes"); if (Files.isRegularFile(legacyNodesPath) == false) { final String content = "written by Elasticsearch " @@ -349,7 +349,7 @@ public final class NodeEnvironment implements Closeable { boolean upgradeNeeded = false; // check if we can do an auto-upgrade - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { final Path nodesFolderPath = path.resolve("nodes"); if (Files.isDirectory(nodesFolderPath)) { final List nodeLockIds = new ArrayList<>(); @@ -392,7 +392,7 @@ public final class NodeEnvironment implements Closeable { return false; } - logger.info("upgrading legacy data folders: {}", Arrays.toString(environment.dataFiles())); + logger.info("upgrading legacy data folders: {}", Arrays.toString(environment.dataDirs())); // acquire locks on legacy path for duration of upgrade (to ensure there is no older ES version running on this path) final NodeLock legacyNodeLock; @@ -403,7 +403,7 @@ public final class NodeEnvironment implements Closeable { Locale.ROOT, "failed to obtain legacy node locks, tried %s;" + " maybe these locations are not writable or multiple nodes were started on the same data path?", - Arrays.toString(environment.dataFiles()) + Arrays.toString(environment.dataDirs()) ); throw new IllegalStateException(message, e); } @@ -494,7 +494,7 @@ public final class NodeEnvironment implements Closeable { } // upgrade successfully completed, remove legacy nodes folders - IOUtils.rm(Stream.of(environment.dataFiles()).map(path -> path.resolve("nodes")).toArray(Path[]::new)); + IOUtils.rm(Stream.of(environment.dataDirs()).map(path -> path.resolve("nodes")).toArray(Path[]::new)); return true; } diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 4895930eaefe..525f90accdf3 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.index.mapper.Mapper; @@ -39,6 +40,7 @@ import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; @@ -722,9 +724,25 @@ public final class IndexSettings { Setting.Property.ServerlessPublic ); + public static final FeatureFlag 
RECOVERY_USE_SYNTHETIC_SOURCE = new FeatureFlag("index_recovery_use_synthetic_source");
     public static final Setting<Boolean> RECOVERY_USE_SYNTHETIC_SOURCE_SETTING = Setting.boolSetting(
         "index.recovery.use_synthetic_source",
-        false,
+        settings -> {
+            boolean isSyntheticSourceRecoveryFeatureFlagEnabled = RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled();
+            boolean isNewIndexVersion = SETTING_INDEX_VERSION_CREATED.get(settings)
+                .onOrAfter(IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT);
+            boolean isIndexVersionInBackportRange = SETTING_INDEX_VERSION_CREATED.get(settings)
+                .between(IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0);
+
+            boolean useSyntheticRecoverySource = isSyntheticSourceRecoveryFeatureFlagEnabled
+                && (isNewIndexVersion || isIndexVersionInBackportRange);
+
+            return String.valueOf(
+                useSyntheticRecoverySource
+                    && Objects.equals(INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings), SourceFieldMapper.Mode.SYNTHETIC)
+            );
+
+        },
         new Setting.Validator<>() {
             @Override
             public void validate(Boolean value) {}
@@ -1083,7 +1101,8 @@
         skipIgnoredSourceRead = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING);
         indexMappingSourceMode = scopedSettings.get(INDEX_MAPPER_SOURCE_MODE_SETTING);
         recoverySourceEnabled = RecoverySettings.INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(nodeSettings);
-        recoverySourceSyntheticEnabled = scopedSettings.get(RECOVERY_USE_SYNTHETIC_SOURCE_SETTING);
+        recoverySourceSyntheticEnabled = DiscoveryNode.isStateless(nodeSettings) == false
+            && scopedSettings.get(RECOVERY_USE_SYNTHETIC_SOURCE_SETTING);
         if (recoverySourceSyntheticEnabled) {
             if (DiscoveryNode.isStateless(settings)) {
                 throw new IllegalArgumentException("synthetic recovery source is only allowed in stateful");
diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java
index 3b173ace0ac7..64f4c356bb12 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java
@@ -134,6 +134,7 @@ public class IndexVersions {
     public static final IndexVersion UPGRADE_TO_LUCENE_9_12_1 = def(8_523_0_00, parseUnchecked("9.12.1"));
     public static final IndexVersion INFERENCE_METADATA_FIELDS_BACKPORT = def(8_524_0_00, parseUnchecked("9.12.1"));
     public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME_BACKPORT = def(8_525_0_00, parseUnchecked("9.12.1"));
+    public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT_BACKPORT = def(8_526_0_00, parseUnchecked("9.12.1"));
     public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_0_00, Version.LUCENE_10_0_0);
     public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_0_00, Version.LUCENE_10_0_0);
     public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_0_00, Version.LUCENE_10_0_0);
@@ -144,6 +145,7 @@ public class IndexVersions {
     public static final IndexVersion SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP = def(9_007_0_00, Version.LUCENE_10_0_0);
     public static final IndexVersion HOSTNAME_DOC_VALUES_SPARSE_INDEX = def(9_008_0_00, Version.LUCENE_10_0_0);
     public static final IndexVersion UPGRADE_TO_LUCENE_10_1_0 = def(9_009_0_00, Version.LUCENE_10_1_0);
+    public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT = def(9_010_0_00, Version.LUCENE_10_1_0);

    /*
     * STOP! READ THIS FIRST!
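
A minimal sketch of how the new default for index.recovery.use_synthetic_source resolves (illustration only; syntheticRecoverySourceByDefault, createdVersion, and sourceMode are hypothetical stand-ins for the setting's lambda and the values it reads from settings):

    // Sketch, not part of the change: mirrors the boolSetting default defined in IndexSettings above.
    static boolean syntheticRecoverySourceByDefault(IndexVersion createdVersion, SourceFieldMapper.Mode sourceMode) {
        boolean flagEnabled = IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled();
        // Either the index was created on a version that has the change, or it falls in the backport
        // range [USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT_BACKPORT, UPGRADE_TO_LUCENE_10_0_0).
        boolean versionEligible = createdVersion.onOrAfter(IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT)
            || createdVersion.between(
                IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT_BACKPORT,
                IndexVersions.UPGRADE_TO_LUCENE_10_0_0
            );
        // The default is true only for indices that already use synthetic _source.
        return flagEnabled && versionEligible && sourceMode == SourceFieldMapper.Mode.SYNTHETIC;
    }
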
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java index 505e39a9590e..a93a48046356 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java @@ -233,7 +233,7 @@ public class Analysis { } } - final Path path = env.configFile().resolve(wordListPath); + final Path path = env.configDir().resolve(wordListPath); try { return loadWordList(path, removeComments); @@ -337,7 +337,7 @@ public class Analysis { if (filePath == null) { return null; } - final Path path = env.configFile().resolve(filePath); + final Path path = env.configDir().resolve(filePath); try { return Files.newBufferedReader(path, StandardCharsets.UTF_8); } catch (CharacterCodingException ex) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java new file mode 100644 index 000000000000..28ea37ef73e3 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java @@ -0,0 +1,270 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortedSetDocValues; +import org.elasticsearch.search.fetch.StoredFieldsSpec; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +/** + * Block loader for fields that use fallback synthetic source implementation. + *
+ * Usually fields have doc_values or stored fields and block loaders use them directly. In some cases neither is available + * and we would fall back to (potentially synthetic) _source. However, in case of synthetic source, there is actually no need to + * construct the entire _source. We know that there is no doc_values and stored fields, and therefore we will be using fallback synthetic + * source. That is equivalent to just reading _ignored_source stored field directly and doing an in-place synthetic source just + * for this field. + *
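+ * For example, a {@code keyword} field with doc_values disabled and {@code store: false} in a
+ * synthetic-source index has neither representation to read from; its values live only in
+ * _ignored_source, which this loader decodes directly.
+ *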
+ * See {@link IgnoredSourceFieldMapper}. + */ +public abstract class FallbackSyntheticSourceBlockLoader implements BlockLoader { + private final Reader reader; + private final String fieldName; + + protected FallbackSyntheticSourceBlockLoader(Reader reader, String fieldName) { + this.reader = reader; + this.fieldName = fieldName; + } + + @Override + public ColumnAtATimeReader columnAtATimeReader(LeafReaderContext context) throws IOException { + return null; + } + + @Override + public RowStrideReader rowStrideReader(LeafReaderContext context) throws IOException { + return new IgnoredSourceRowStrideReader<>(fieldName, reader); + } + + @Override + public StoredFieldsSpec rowStrideStoredFieldSpec() { + return new StoredFieldsSpec(false, false, Set.of(IgnoredSourceFieldMapper.NAME)); + } + + @Override + public boolean supportsOrdinals() { + return false; + } + + @Override + public SortedSetDocValues ordinals(LeafReaderContext context) throws IOException { + throw new UnsupportedOperationException(); + } + + private record IgnoredSourceRowStrideReader(String fieldName, Reader reader) implements RowStrideReader { + @Override + public void read(int docId, StoredFields storedFields, Builder builder) throws IOException { + var ignoredSource = storedFields.storedFields().get(IgnoredSourceFieldMapper.NAME); + if (ignoredSource == null) { + return; + } + + Map> valuesForFieldAndParents = new HashMap<>(); + + // Contains name of the field and all its parents + Set fieldNames = new HashSet<>() { + { + add("_doc"); + } + }; + + var current = new StringBuilder(); + for (String part : fieldName.split("\\.")) { + if (current.isEmpty() == false) { + current.append('.'); + } + current.append(part); + fieldNames.add(current.toString()); + } + + for (Object value : ignoredSource) { + IgnoredSourceFieldMapper.NameValue nameValue = IgnoredSourceFieldMapper.decode(value); + if (fieldNames.contains(nameValue.name())) { + valuesForFieldAndParents.computeIfAbsent(nameValue.name(), k -> new ArrayList<>()).add(nameValue); + } + } + + // TODO figure out how to handle XContentDataHelper#voidValue() + + var blockValues = new ArrayList(); + + var leafFieldValue = valuesForFieldAndParents.get(fieldName); + if (leafFieldValue != null) { + readFromFieldValue(leafFieldValue, blockValues); + } else { + readFromParentValue(valuesForFieldAndParents, blockValues); + } + + if (blockValues.isEmpty() == false) { + if (blockValues.size() > 1) { + builder.beginPositionEntry(); + } + + reader.writeToBlock(blockValues, builder); + + if (blockValues.size() > 1) { + builder.endPositionEntry(); + } + } else { + builder.appendNull(); + } + } + + private void readFromFieldValue(List nameValues, List blockValues) throws IOException { + if (nameValues.isEmpty()) { + return; + } + + for (var nameValue : nameValues) { + // Leaf field is stored directly (not as a part of a parent object), let's try to decode it. 
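+                // XContentDataHelper.decode(...) materializes simple scalars eagerly and returns
+                // Optional.empty() for object and array encodings, which are re-parsed below.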
+ Optional singleValue = XContentDataHelper.decode(nameValue.value()); + if (singleValue.isPresent()) { + reader.convertValue(singleValue.get(), blockValues); + continue; + } + + // We have a value for this field but it's an array or an object + var type = XContentDataHelper.decodeType(nameValue.value()); + assert type.isPresent(); + + try ( + XContentParser parser = type.get() + .xContent() + .createParser( + XContentParserConfiguration.EMPTY, + nameValue.value().bytes, + nameValue.value().offset + 1, + nameValue.value().length - 1 + ) + ) { + parser.nextToken(); + parseWithReader(parser, blockValues); + } + } + } + + private void readFromParentValue( + Map> valuesForFieldAndParents, + List blockValues + ) throws IOException { + if (valuesForFieldAndParents.isEmpty()) { + return; + } + + // If a parent object is stored at a particular level its children won't be stored. + // So we should only ever have one parent here. + assert valuesForFieldAndParents.size() == 1 : "_ignored_source field contains multiple levels of the same object"; + var parentValues = valuesForFieldAndParents.values().iterator().next(); + + for (var nameValue : parentValues) { + parseFieldFromParent(nameValue, blockValues); + } + } + + private void parseFieldFromParent(IgnoredSourceFieldMapper.NameValue nameValue, List blockValues) throws IOException { + var type = XContentDataHelper.decodeType(nameValue.value()); + assert type.isPresent(); + + String nameAtThisLevel = fieldName.substring(nameValue.name().length() + 1); + var filterParserConfig = XContentParserConfiguration.EMPTY.withFiltering(null, Set.of(nameAtThisLevel), Set.of(), true); + try ( + XContentParser parser = type.get() + .xContent() + .createParser(filterParserConfig, nameValue.value().bytes, nameValue.value().offset + 1, nameValue.value().length - 1) + ) { + parser.nextToken(); + var fieldNameInParser = new StringBuilder(nameValue.name()); + while (true) { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + fieldNameInParser.append('.').append(parser.currentName()); + if (fieldNameInParser.toString().equals(fieldName)) { + parser.nextToken(); + break; + } + } + parser.nextToken(); + } + parseWithReader(parser, blockValues); + } + } + + private void parseWithReader(XContentParser parser, List blockValues) throws IOException { + if (parser.currentToken() == XContentParser.Token.START_ARRAY) { + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + reader.parse(parser, blockValues); + } + return; + } + + reader.parse(parser, blockValues); + } + + @Override + public boolean canReuse(int startingDocID) { + return true; + } + } + + /** + * Field-specific implementation that converts data stored in _ignored_source field to block loader values. + * @param + */ + public interface Reader { + /** + * Converts a raw stored value for this field to a value in a format suitable for block loader and adds it to the provided + * accumulator. + * @param value raw decoded value from _ignored_source field (synthetic _source value) + * @param accumulator list containing the result of conversion + */ + void convertValue(Object value, List accumulator); + + /** + * Parses one or more complex values using a provided parser and adds them to the provided accumulator. 
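+     * For array values, {@code parseWithReader} above invokes this method once per element, so
+     * implementations see individual scalar or object tokens rather than the enclosing array.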
+ * @param parser parser of a value from _ignored_source field (synthetic _source value) + * @param accumulator list containing the results of parsing + */ + void parse(XContentParser parser, List accumulator) throws IOException; + + void writeToBlock(List values, Builder blockBuilder); + } + + public abstract static class ReaderWithNullValueSupport implements Reader { + private final T nullValue; + + public ReaderWithNullValueSupport(T nullValue) { + this.nullValue = nullValue; + } + + @Override + public void parse(XContentParser parser, List accumulator) throws IOException { + if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { + if (nullValue != null) { + convertValue(nullValue, accumulator); + } + return; + } + + parseNonNullValue(parser, accumulator); + } + + abstract void parseNonNullValue(XContentParser parser, List accumulator) throws IOException; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index b7528bd3729e..5cd968dff52a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -67,6 +67,7 @@ import org.elasticsearch.search.runtime.StringScriptFieldRegexpQuery; import org.elasticsearch.search.runtime.StringScriptFieldTermQuery; import org.elasticsearch.search.runtime.StringScriptFieldWildcardQuery; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.io.UncheckedIOException; @@ -74,6 +75,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; @@ -738,10 +740,54 @@ public final class KeywordFieldMapper extends FieldMapper { if (isStored()) { return new BlockStoredFieldsReader.BytesFromBytesRefsBlockLoader(name()); } + + if (isSyntheticSource) { + return new FallbackSyntheticSourceBlockLoader(fallbackSyntheticSourceBlockLoaderReader(), name()) { + @Override + public Builder builder(BlockFactory factory, int expectedCount) { + return factory.bytesRefs(expectedCount); + } + }; + } + SourceValueFetcher fetcher = sourceValueFetcher(blContext.sourcePaths(name())); return new BlockSourceReader.BytesRefsBlockLoader(fetcher, sourceBlockLoaderLookup(blContext)); } + private FallbackSyntheticSourceBlockLoader.Reader fallbackSyntheticSourceBlockLoaderReader() { + var nullValueBytes = nullValue != null ? new BytesRef(nullValue) : null; + return new FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport<>(nullValueBytes) { + @Override + public void convertValue(Object value, List accumulator) { + String stringValue = ((BytesRef) value).utf8ToString(); + String adjusted = applyIgnoreAboveAndNormalizer(stringValue); + if (adjusted != null) { + // TODO what if the value didn't change? 
+ accumulator.add(new BytesRef(adjusted)); + } + } + + @Override + public void parseNonNullValue(XContentParser parser, List accumulator) throws IOException { + assert parser.currentToken() == XContentParser.Token.VALUE_STRING : "Unexpected token " + parser.currentToken(); + + var value = applyIgnoreAboveAndNormalizer(parser.text()); + if (value != null) { + accumulator.add(new BytesRef(value)); + } + } + + @Override + public void writeToBlock(List values, BlockLoader.Builder blockBuilder) { + var bytesRefBuilder = (BlockLoader.BytesRefBuilder) blockBuilder; + + for (var value : values) { + bytesRefBuilder.appendBytesRef(value); + } + } + }; + } + private BlockSourceReader.LeafIteratorLookup sourceBlockLoaderLookup(BlockLoaderContext blContext) { if (getTextSearchInfo().hasNorms()) { return BlockSourceReader.lookupFromNorms(name()); @@ -821,15 +867,19 @@ public final class KeywordFieldMapper extends FieldMapper { @Override protected String parseSourceValue(Object value) { String keywordValue = value.toString(); - if (keywordValue.length() > ignoreAbove) { - return null; - } - - return normalizeValue(normalizer(), name(), keywordValue); + return applyIgnoreAboveAndNormalizer(keywordValue); } }; } + private String applyIgnoreAboveAndNormalizer(String value) { + if (value.length() > ignoreAbove) { + return null; + } + + return normalizeValue(normalizer(), name(), value); + } + @Override public Object valueForDisplay(Object value) { if (value == null) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index abca8e057f3b..4c9214015fba 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -1127,7 +1127,7 @@ public class ObjectMapper extends Mapper { for (SourceLoader.SyntheticFieldLoader loader : fields) { ignoredValuesPresent |= loader.setIgnoredValues(objectsWithIgnoredFields); } - return this.ignoredValues != null; + return ignoredValuesPresent; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/XContentDataHelper.java b/server/src/main/java/org/elasticsearch/index/mapper/XContentDataHelper.java index 646368b96a4c..8c2f567e2896 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/XContentDataHelper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/XContentDataHelper.java @@ -110,6 +110,28 @@ public final class XContentDataHelper { } } + /** + * Decode the value in the passed {@link BytesRef} in place and return it. + * Returns {@link Optional#empty()} for complex values (objects and arrays). 
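+     * The first byte of {@code r} is the type marker written by {@code encodeToken}; markers for
+     * scalar types are decoded eagerly here, while object and array encodings are left for the
+     * caller to parse.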
+ */ + static Optional decode(BytesRef r) { + return switch ((char) r.bytes[r.offset]) { + case BINARY_ENCODING -> Optional.of(TypeUtils.EMBEDDED_OBJECT.decode(r)); + case CBOR_OBJECT_ENCODING, JSON_OBJECT_ENCODING, YAML_OBJECT_ENCODING, SMILE_OBJECT_ENCODING -> Optional.empty(); + case BIG_DECIMAL_ENCODING -> Optional.of(TypeUtils.BIG_DECIMAL.decode(r)); + case FALSE_ENCODING, TRUE_ENCODING -> Optional.of(TypeUtils.BOOLEAN.decode(r)); + case BIG_INTEGER_ENCODING -> Optional.of(TypeUtils.BIG_INTEGER.decode(r)); + case STRING_ENCODING -> Optional.of(TypeUtils.STRING.decode(r)); + case INTEGER_ENCODING -> Optional.of(TypeUtils.INTEGER.decode(r)); + case LONG_ENCODING -> Optional.of(TypeUtils.LONG.decode(r)); + case DOUBLE_ENCODING -> Optional.of(TypeUtils.DOUBLE.decode(r)); + case FLOAT_ENCODING -> Optional.of(TypeUtils.FLOAT.decode(r)); + case NULL_ENCODING -> Optional.ofNullable(TypeUtils.NULL.decode(r)); + case VOID_ENCODING -> Optional.of(TypeUtils.VOID.decode(r)); + default -> throw new IllegalArgumentException("Can't decode " + r); + }; + } + /** * Determines if the given {@link BytesRef}, encoded with {@link XContentDataHelper#encodeToken(XContentParser)}, * is an encoded object. @@ -339,6 +361,11 @@ public final class XContentDataHelper { return bytes; } + @Override + Object decode(BytesRef r) { + return new BytesRef(r.bytes, r.offset + 1, r.length - 1); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(new BytesRef(r.bytes, r.offset + 1, r.length - 1).utf8ToString()); @@ -359,6 +386,11 @@ public final class XContentDataHelper { return bytes; } + @Override + Object decode(BytesRef r) { + return ByteUtils.readIntLE(r.bytes, 1 + r.offset); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(ByteUtils.readIntLE(r.bytes, 1 + r.offset)); @@ -379,6 +411,11 @@ public final class XContentDataHelper { return bytes; } + @Override + Object decode(BytesRef r) { + return ByteUtils.readLongLE(r.bytes, 1 + r.offset); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(ByteUtils.readLongLE(r.bytes, 1 + r.offset)); @@ -399,6 +436,11 @@ public final class XContentDataHelper { return bytes; } + @Override + Object decode(BytesRef r) { + return ByteUtils.readDoubleLE(r.bytes, 1 + r.offset); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(ByteUtils.readDoubleLE(r.bytes, 1 + r.offset)); @@ -419,6 +461,11 @@ public final class XContentDataHelper { return bytes; } + @Override + Object decode(BytesRef r) { + return ByteUtils.readFloatLE(r.bytes, 1 + r.offset); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(ByteUtils.readFloatLE(r.bytes, 1 + r.offset)); @@ -437,6 +484,11 @@ public final class XContentDataHelper { return bytes; } + @Override + Object decode(BytesRef r) { + return new BigInteger(r.bytes, r.offset + 1, r.length - 1); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(new BigInteger(r.bytes, r.offset + 1, r.length - 1)); @@ -455,6 +507,15 @@ public final class XContentDataHelper { return bytes; } + @Override + Object decode(BytesRef r) { + if (r.length < 5) { + throw new IllegalArgumentException("Can't decode " + r); + } + int scale = ByteUtils.readIntLE(r.bytes, r.offset + 1); + return new BigDecimal(new BigInteger(r.bytes, r.offset + 5, r.length - 5), scale); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) 
throws IOException { if (r.length < 5) { @@ -477,6 +538,15 @@ public final class XContentDataHelper { return bytes; } + @Override + Object decode(BytesRef r) { + if (r.length != 1) { + throw new IllegalArgumentException("Can't decode " + r); + } + assert r.bytes[r.offset] == 't' || r.bytes[r.offset] == 'f' : r.bytes[r.offset]; + return r.bytes[r.offset] == 't'; + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { if (r.length != 1) { @@ -499,6 +569,11 @@ public final class XContentDataHelper { return bytes; } + @Override + Object decode(BytesRef r) { + return null; + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.nullValue(); @@ -517,6 +592,11 @@ public final class XContentDataHelper { return bytes; } + @Override + Object decode(BytesRef r) { + return new BytesRef(r.bytes, r.offset + 1, r.length - 1); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(r.bytes, r.offset + 1, r.length - 1); @@ -538,6 +618,11 @@ public final class XContentDataHelper { } } + @Override + Object decode(BytesRef r) { + throw new UnsupportedOperationException(); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { switch ((char) r.bytes[r.offset]) { @@ -562,6 +647,11 @@ public final class XContentDataHelper { return bytes; } + @Override + Object decode(BytesRef r) { + throw new UnsupportedOperationException(); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) { // NOOP @@ -591,6 +681,8 @@ public final class XContentDataHelper { abstract byte[] encode(XContentParser parser) throws IOException; + abstract Object decode(BytesRef r); + abstract void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException; static byte[] encode(BigInteger n, Byte encoding) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index 886c5e4bf6d3..bfe1cd9b28de 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -122,7 +122,7 @@ public final class HunspellService { } private static Path resolveHunspellDirectory(Environment env) { - return env.configFile().resolve("hunspell"); + return env.configDir().resolve("hunspell"); } /** @@ -193,7 +193,7 @@ public final class HunspellService { affixStream = Files.newInputStream(affixFiles[0]); - try (Directory tmp = new NIOFSDirectory(env.tmpFile())) { + try (Directory tmp = new NIOFSDirectory(env.tmpDir())) { return new Dictionary(tmp, "hunspell", affixStream, dicStreams, ignoreCase); } diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 80c9aafaa84b..f4b390c9863f 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -652,7 +652,7 @@ public class Node implements Closeable { * Writes a file to the logs dir containing the ports for the given transport type */ private void writePortsFile(String type, BoundTransportAddress boundAddress) { - Path tmpPortsFile = environment.logsFile().resolve(type + ".ports.tmp"); + Path tmpPortsFile = environment.logsDir().resolve(type + ".ports.tmp"); try (BufferedWriter writer = Files.newBufferedWriter(tmpPortsFile, Charset.forName("UTF-8"))) { for (TransportAddress address : 
boundAddress.boundAddresses()) {
                InetAddress inetAddress = InetAddress.getByName(address.getAddress());
@@ -661,7 +661,7 @@
        } catch (IOException e) {
            throw new RuntimeException("Failed to write ports file", e);
        }
-        Path portsFile = environment.logsFile().resolve(type + ".ports");
+        Path portsFile = environment.logsDir().resolve(type + ".ports");
        try {
            Files.move(tmpPortsFile, portsFile, StandardCopyOption.ATOMIC_MOVE);
        } catch (IOException e) {
diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java
index 91f67015a4d4..df476160fdef 100644
--- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java
+++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java
@@ -450,7 +450,7 @@ class NodeConstruction {
            );
        }
-        if (initialEnvironment.dataFiles().length > 1) {
+        if (initialEnvironment.dataDirs().length > 1) {
            // NOTE: we use initialEnvironment here, but assertEquivalent below ensures the data paths do not change
            deprecationLogger.warn(
                DeprecationCategory.SETTINGS,
@@ -471,10 +471,10 @@ class NodeConstruction {
        if (logger.isDebugEnabled()) {
            logger.debug(
                "using config [{}], data [{}], logs [{}], plugins [{}]",
-                initialEnvironment.configFile(),
-                Arrays.toString(initialEnvironment.dataFiles()),
-                initialEnvironment.logsFile(),
-                initialEnvironment.pluginsFile()
+                initialEnvironment.configDir(),
+                Arrays.toString(initialEnvironment.dataDirs()),
+                initialEnvironment.logsDir(),
+                initialEnvironment.pluginsDir()
            );
        }
@@ -491,7 +491,7 @@ class NodeConstruction {
        /*
         * Create the environment based on the finalized view of the settings. This is to ensure that components get the same setting
         * values, no matter where they ask for them from.
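         * (assertEquivalent below verifies that the environment rebuilt here resolves the same paths as
         * the initial one.)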
*/ - environment = new Environment(settings, initialEnvironment.configFile()); + environment = new Environment(settings, initialEnvironment.configDir()); Environment.assertEquivalent(initialEnvironment, environment); modules.bindToInstance(Environment.class, environment); @@ -1657,7 +1657,7 @@ class NodeConstruction { pluginsService.filterPlugins(DiscoveryPlugin.class).toList(), pluginsService.filterPlugins(ClusterCoordinationPlugin.class).toList(), allocationService, - environment.configFile(), + environment.configDir(), gatewayMetaState, rerouteService, fsHealthService, diff --git a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java index 4b7524a7ac01..b9e58863cad6 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java +++ b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java @@ -53,7 +53,7 @@ class NodeServiceProvider { PluginsService newPluginService(Environment initialEnvironment, PluginsLoader pluginsLoader) { // this creates a PluginsService with an empty list of classpath plugins - return new PluginsService(initialEnvironment.settings(), initialEnvironment.configFile(), pluginsLoader); + return new PluginsService(initialEnvironment.settings(), initialEnvironment.configDir(), pluginsLoader); } ScriptService newScriptService( diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java index 9eb9a93439bc..93de38784556 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java @@ -8,6 +8,8 @@ */ package org.elasticsearch.persistent; +import org.elasticsearch.core.Strings; + import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -23,7 +25,17 @@ public class PersistentTasksExecutorRegistry { public PersistentTasksExecutorRegistry(Collection> taskExecutors) { Map> map = new HashMap<>(); for (PersistentTasksExecutor executor : taskExecutors) { - map.put(executor.getTaskName(), executor); + final var old = map.put(executor.getTaskName(), executor); + if (old != null) { + final var message = Strings.format( + "task [%s] is already registered with [%s], cannot re-register with [%s]", + executor.getTaskName(), + old, + executor + ); + assert false : message; + throw new IllegalStateException(message); + } } this.taskExecutors = Collections.unmodifiableMap(map); } diff --git a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index b88524586abe..f284faf8f304 100644 --- a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -92,13 +92,13 @@ public class FsRepository extends BlobStoreRepository { ); throw new RepositoryException(metadata.name(), "missing location"); } - Path locationFile = environment.resolveRepoFile(location); + Path locationFile = environment.resolveRepoDir(location); if (locationFile == null) { - if (environment.repoFiles().length > 0) { + if (environment.repoDirs().length > 0) { logger.warn( "The specified location [{}] doesn't start with any " + "repository paths specified by the path.repo setting: [{}] ", location, - environment.repoFiles() + 
environment.repoDirs()
                );
                throw new RepositoryException(
                    metadata.name(),
@@ -127,7 +127,7 @@ public class FsRepository extends BlobStoreRepository {
    @Override
    protected BlobStore createBlobStore() throws Exception {
        final String location = REPOSITORIES_LOCATION_SETTING.get(getMetadata().settings());
-        final Path locationFile = environment.resolveRepoFile(location);
+        final Path locationFile = environment.resolveRepoDir(location);
        return new FsBlobStore(bufferSize, locationFile, isReadOnly());
    }
diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java
index 95c8926f4e90..d7e402db462e 100644
--- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java
+++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java
@@ -89,7 +89,7 @@ public class FileSettingsService extends MasterNodeFileWatchingService implement
        Environment environment,
        FileSettingsHealthIndicatorService healthIndicatorService
    ) {
-        super(clusterService, environment.configFile().toAbsolutePath().resolve(OPERATOR_DIRECTORY));
+        super(clusterService, environment.configDir().toAbsolutePath().resolve(OPERATOR_DIRECTORY));
        this.watchedFile = watchedFileDir().resolve(SETTINGS_FILE_NAME);
        this.stateService = stateService;
        this.healthIndicatorService = healthIndicatorService;
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java
index da7a7d3379ee..9ab7f275252a 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java
@@ -12,8 +12,6 @@ package org.elasticsearch.rest.action.admin.cluster;
 import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesAction;
 import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest;
 import org.elasticsearch.client.internal.node.NodeClient;
-import org.elasticsearch.cluster.metadata.DesiredNode;
-import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.action.RestToXContentListener;
@@ -27,10 +25,6 @@ import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout;
 public class RestUpdateDesiredNodesAction extends BaseRestHandler {
-    private final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestUpdateDesiredNodesAction.class);
-    private static final String VERSION_DEPRECATION_MESSAGE =
-        "[version removal] Specifying node_version in desired nodes requests is deprecated.";
-
     @Override
     public String getName() {
         return "update_desired_nodes";
@@ -59,10 +53,6 @@ public class RestUpdateDesiredNodesAction extends BaseRestHandler {
             );
         }
-        if (updateDesiredNodesRequest.getNodes().stream().anyMatch(DesiredNode::hasVersion)) {
-            deprecationLogger.compatibleCritical("desired_nodes_version", VERSION_DEPRECATION_MESSAGE);
-        }
-
         return restChannel -> client.execute(
             UpdateDesiredNodesAction.INSTANCE,
             updateDesiredNodesRequest,
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java
b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java index 06e9b02a9293..7659e096c115 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeCapabilities; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -19,6 +20,7 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.List; +import java.util.Set; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -49,4 +51,9 @@ public class RestAnalyzeAction extends BaseRestHandler { } } + @Override + public Set supportedCapabilities() { + return AnalyzeCapabilities.CAPABILITIES; + } + } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java index 94fa60762800..f8dc26e9c468 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java @@ -9,11 +9,9 @@ package org.elasticsearch.rest.action.cat; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.core.TimeValue; @@ -99,24 +97,6 @@ public class RestSnapshotAction extends AbstractCatAction { private Table buildTable(RestRequest req, GetSnapshotsResponse getSnapshotsResponse) { Table table = getTableWithHeader(req); - if (getSnapshotsResponse.isFailed()) { - ElasticsearchException causes = null; - - for (ElasticsearchException e : getSnapshotsResponse.getFailures().values()) { - if (causes == null) { - causes = e; - } else { - causes.addSuppressed(e); - } - } - throw new ElasticsearchException( - "Repositories [" - + Strings.collectionToCommaDelimitedString(getSnapshotsResponse.getFailures().keySet()) - + "] failed to retrieve snapshots", - causes - ); - } - for (SnapshotInfo snapshotStatus : getSnapshotsResponse.getSnapshots()) { table.startRow(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java index ef21e4103fd8..f763ac8f795f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java @@ -211,9 +211,9 @@ public abstract class InternalMultiBucketAggregation< List reducedBuckets = new ArrayList<>(); for (B bucket : getBuckets()) { List aggs = new ArrayList<>(); - for (Aggregation agg : bucket.getAggregations()) { + for (InternalAggregation agg : bucket.getAggregations()) { PipelineTree subTree = 
pipelineTree.subTree(agg.getName()); - aggs.add(((InternalAggregation) agg).reducePipelines((InternalAggregation) agg, reduceContext, subTree)); + aggs.add(agg.reducePipelines(agg, reduceContext, subTree)); } reducedBuckets.add(createBucket(InternalAggregations.from(aggs), bucket)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InvalidAggregationPathException.java b/server/src/main/java/org/elasticsearch/search/aggregations/InvalidAggregationPathException.java index 739dc5874333..34fcf58e43bd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InvalidAggregationPathException.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InvalidAggregationPathException.java @@ -20,10 +20,6 @@ public class InvalidAggregationPathException extends ElasticsearchException { super(msg); } - public InvalidAggregationPathException(String msg, Throwable cause) { - super(msg, cause); - } - public InvalidAggregationPathException(StreamInput in) throws IOException { super(in); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 441b30f872a3..4d946bfb3bb9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -461,7 +461,7 @@ public final class CompositeAggregator extends BucketsAggregator implements Size // Visit documents sorted by the leading source of the composite definition and terminates // when the leading source value is guaranteed to be greater than the lowest composite bucket // in the queue. 
- DocIdSet docIdSet = sortedDocsProducer.processLeaf(topLevelQuery(), queue, aggCtx.getLeafReaderContext(), fillDocIdSet); + DocIdSet docIdSet = sortedDocsProducer.processLeaf(queue, aggCtx.getLeafReaderContext(), fillDocIdSet); if (fillDocIdSet) { entries.add(new Entry(aggCtx, docIdSet)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java index 7c920abfe245..e88c9724edba 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PointValues; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.DocIdSet; -import org.apache.lucene.search.Query; import org.apache.lucene.util.DocIdSetBuilder; import java.io.IOException; @@ -36,8 +35,7 @@ class PointsSortedDocsProducer extends SortedDocsProducer { } @Override - DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) - throws IOException { + DocIdSet processLeaf(CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) throws IOException { final PointValues values = context.reader().getPointValues(field); if (values == null) { // no value for the field diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java index 4503758c55b0..2d1b628482d4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java @@ -12,7 +12,6 @@ package org.elasticsearch.search.aggregations.bucket.composite; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Query; import org.apache.lucene.util.Bits; import org.apache.lucene.util.DocIdSetBuilder; import org.elasticsearch.core.Nullable; @@ -99,6 +98,5 @@ abstract class SortedDocsProducer { * Returns the {@link DocIdSet} of the documents that contain a top composite bucket in this leaf or * {@link DocIdSet#EMPTY} if fillDocIdSet is false. 
*/ - abstract DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) - throws IOException; + abstract DocIdSet processLeaf(CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java index e2aaba06a19e..3b62cb8f57d8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java @@ -14,7 +14,6 @@ import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSet; -import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.DocIdSetBuilder; @@ -29,8 +28,7 @@ class TermsSortedDocsProducer extends SortedDocsProducer { } @Override - DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) - throws IOException { + DocIdSet processLeaf(CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) throws IOException { final Terms terms = context.reader().terms(field); if (terms == null) { // no value for the field diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java index 7291a099dd7f..9994a2bca08b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java @@ -25,15 +25,7 @@ public class InternalDateRange extends InternalRange aggregations, - boolean keyed, - DocValueFormat formatter - ) { + public Bucket(String key, double from, double to, long docCount, List aggregations, DocValueFormat formatter) { super(key, from, to, docCount, InternalAggregations.from(aggregations), formatter); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java index 17982043e8e2..b65b0e1ec010 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java @@ -75,24 +75,6 @@ public abstract class BytesKeyedBucketOrds implements Releasable { * Read the current value. */ void readValue(BytesRef dest); - - /** - * An {@linkplain BucketOrdsEnum} that is empty. 
- */ - BucketOrdsEnum EMPTY = new BucketOrdsEnum() { - @Override - public boolean next() { - return false; - } - - @Override - public long ord() { - return 0; - } - - @Override - public void readValue(BytesRef dest) {} - }; } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index 5108793b8a80..9db9a4101662 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -72,7 +72,7 @@ public abstract class InternalSignificantTerms= states.size()) { return null; } - final DoubleHistogram state = states.get(bucketOrd); - return state; + return states.get(bucketOrd); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java index 00d7890e4710..2605fc1c0936 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java @@ -44,9 +44,6 @@ public abstract class AbstractHyperLogLogPlusPlus extends AbstractCardinalityAlg /** Get HyperLogLog algorithm */ protected abstract AbstractHyperLogLog.RunLenIterator getHyperLogLog(long bucketOrd); - /** Get the number of data structures */ - public abstract long maxOrd(); - /** Collect a value in the given bucket */ public abstract void collect(long bucketOrd, long hash); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractLinearCounting.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractLinearCounting.java index 1f1cbd0b34a6..908fb4bb0a2e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractLinearCounting.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractLinearCounting.java @@ -39,11 +39,6 @@ public abstract class AbstractLinearCounting extends AbstractCardinalityAlgorith */ protected abstract int size(long bucketOrd); - /** - * return the current values in the counter. - */ - protected abstract HashesIterator values(long bucketOrd); - public int collect(long bucketOrd, long hash) { final int k = encodeHash(hash, p); return addEncoded(bucketOrd, k); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java index 5edcf745c418..08e9de383691 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java @@ -68,11 +68,6 @@ public interface ExtendedStats extends Stats { */ String getStdDeviationSamplingAsString(); - /** - * The upper or lower bounds of stdDev of the collected values as a String. - */ - String getStdDeviationBoundAsString(Bounds bound); - /** * The sum of the squares of the collected values as a String. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java index 5af6a50a8c4a..16dfbdada4b0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java @@ -87,7 +87,6 @@ public final class HyperLogLogPlusPlus extends AbstractHyperLogLogPlusPlus { this.algorithm = algorithm; } - @Override public long maxOrd() { return hll.maxOrd(); } @@ -322,8 +321,7 @@ public final class HyperLogLogPlusPlus extends AbstractHyperLogLogPlusPlus { return size; } - @Override - protected HashesIterator values(long bucketOrd) { + private HashesIterator values(long bucketOrd) { // Make a fresh BytesRef for reading scratch work because this method can be called on many threads return new LinearCountingIterator(this, new BytesRef(), bucketOrd); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java index 1736b5ea7656..8b1dcfb8a2f8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java @@ -36,11 +36,6 @@ final class HyperLogLogPlusPlusSparse extends AbstractHyperLogLogPlusPlus implem this.lc = new LinearCounting(precision, bigArrays, initialBuckets); } - @Override - public long maxOrd() { - return lc.sizes.size(); - } - /** Needs to be called before adding elements into a bucket */ protected void ensureCapacity(long bucketOrd, long size) { lc.ensureCapacity(bucketOrd, size); @@ -135,8 +130,7 @@ final class HyperLogLogPlusPlusSparse extends AbstractHyperLogLogPlusPlus implem return size; } - @Override - protected HashesIterator values(long bucketOrd) { + private HashesIterator values(long bucketOrd) { return new LinearCountingIterator(values.get(bucketOrd), size(bucketOrd)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalBounds.java index c3a106bd9af4..8a128b77a730 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalBounds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalBounds.java @@ -73,9 +73,8 @@ public abstract class InternalBounds extends InternalAgg }; } else if (path.size() == 2) { BoundingBox bbox = resolveBoundingBox(); - T cornerPoint = null; String cornerString = path.get(0); - cornerPoint = switch (cornerString) { + T cornerPoint = switch (cornerString) { case "top_left" -> bbox.topLeft(); case "bottom_right" -> bbox.bottomRight(); default -> throw new IllegalArgumentException("Found unknown path element [" + cornerString + "] in [" + getName() + "]"); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java index 7965211e2468..c6f4adc735c0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java @@ -245,8 +245,7 @@ 
public class InternalExtendedStats extends InternalStats implements ExtendedStat return valueAsString(Metrics.std_deviation_sampling.name()); } - @Override - public String getStdDeviationBoundAsString(Bounds bound) { + private String getStdDeviationBoundAsString(Bounds bound) { return switch (bound) { case UPPER -> valueAsString(Metrics.std_upper.name()); case LOWER -> valueAsString(Metrics.std_lower.name()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java index 48adad3cee61..e537c7348da6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java @@ -90,15 +90,6 @@ public abstract class InternalNumericMetricsAggregation extends InternalAggregat super(in); } - /** - * Read from a stream. - * - * @param readFormat whether to read the "format" field - */ - protected MultiValue(StreamInput in, boolean readFormat) throws IOException { - super(in, readFormat); - } - public abstract double value(String name); public String valueAsString(String name) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index ac37b287736a..2ec30b411928 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -662,7 +662,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder scriptFields = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { String scriptFieldName = parser.currentName(); token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { @@ -740,12 +740,12 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder paths = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { String path = parser.text(); paths.add(path); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java index 802aef5be68f..1213b1a71761 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java @@ -28,8 +28,8 @@ public class InternalBucketMetricValue extends InternalNumericMetricsAggregation public static final String NAME = "bucket_metric_value"; static final ParseField KEYS_FIELD = new ParseField("keys"); - private double value; - private String[] keys; + private final double value; + private final String[] keys; public InternalBucketMetricValue(String name, String[] keys, double value, DocValueFormat formatter, Map metadata) { super(name, formatter, metadata); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java index 667e34d85b79..beb125608cbe 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java @@ -26,9 +26,9 @@ import java.util.Map; import java.util.Objects; public class InternalPercentilesBucket extends InternalNumericMetricsAggregation.MultiValue implements PercentilesBucket { - private double[] percentiles; - private double[] percents; - private boolean keyed = true; + private final double[] percentiles; + private final double[] percents; + private final boolean keyed; private final transient Map percentileLookups = new HashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java index 8337d644c9a9..86807e9772a2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java @@ -21,7 +21,7 @@ import java.util.Map; public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAggregator { private final double[] percents; - private boolean keyed = true; + private final boolean keyed; private List data; PercentilesBucketPipelineAggregator( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java index 2537d79a40bf..03b4867f6036 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java @@ -12,6 +12,7 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -169,11 +170,11 @@ public class SerialDiffPipelineAggregationBuilder extends AbstractPipelineAggreg } else if (token == XContentParser.Token.START_ARRAY) { if (BUCKETS_PATH.match(currentFieldName, parser.getDeprecationHandler())) { List paths = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { String path = parser.text(); paths.add(path); } - bucketsPaths = paths.toArray(new String[paths.size()]); + bucketsPaths = paths.toArray(Strings.EMPTY_ARRAY); } else { throw new ParsingException( parser.getTokenLocation(), diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java index c720f3d9465a..02f300df4838 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java @@ -30,7 +30,6 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.DocCountFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.Rewriteable; @@ -309,14 +308,6 @@ public abstract class AggregationContext implements Releasable { public abstract Set<String> sourcePath(String fullName); - /** - * Returns the MappingLookup for the index, if one is initialized. - */ - @Nullable - public MappingLookup getMappingLookup() { - return null; - } - /** * Does this index have a {@code _doc_count} field in any segment? */ @@ -621,11 +612,6 @@ public abstract class AggregationContext implements Releasable { return context.sourcePath(fullName); } - @Override - public MappingLookup getMappingLookup() { - return context.getMappingLookup(); - } - @Override public void close() { /* diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java index 31adf423d74c..23ccf1d94084 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java @@ -19,8 +19,6 @@ import java.util.concurrent.atomic.LongAdder; public class AggregationUsageService implements ReportingService<AggregationInfo> { private static final String ES_SEARCH_QUERY_AGGREGATIONS_TOTAL_COUNT = "es.search.query.aggregations.total"; - private final String AGGREGATION_NAME_KEY = "aggregation_name"; - private final String VALUES_SOURCE_KEY = "values_source"; private final LongCounter aggregationsUsageCounter; private final Map<String, Map<String, LongAdder>> aggs; private final AggregationInfo info; @@ -83,6 +81,8 @@ public class AggregationUsageService implements ReportingService - values source type * @param <T> - parser context */ - public static void declareField( String fieldName, AbstractObjectParser<? extends ValuesSourceAggregationBuilder<?>, T> objectParser, boolean scriptable, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java index 472619da7862..313f8e43014d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java @@ -263,11 +263,7 @@ public class TimeSeriesIndexSearcher { // true if the TSID ord has changed since the last time we checked boolean shouldPop() throws IOException { - if (tsidOrd != tsids.ordValue()) { - return true; - } else { - return false; - } + return tsidOrd != tsids.ordValue(); } } } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java index 1a9043d093fe..eb4e0394bb5a 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java @@ -11,7 +11,6 @@ package org.elasticsearch.transport;
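The aggregation-side edits in the stretch above are mechanical modernizations: boolean conditions returned directly instead of via if/else (shouldPop), per-instance string constants replaced by static ones, mutable fields marked final, and assign-in-every-branch code rewritten as switch expressions. A composite sketch of these idioms in one hypothetical class (none of these names exist in the codebase):

    final class CleanupSketch {
        private static final String NAME_KEY = "aggregation_name"; // static: one copy, not one per instance
        private final double value; // final: assigned exactly once, in the constructor

        CleanupSketch(double value) {
            this.value = value;
        }

        boolean changed(long previous, long current) {
            return previous != current; // no if/else wrapped around a boolean expression
        }

        String bound(String which) {
            return switch (which) { // switch expression instead of assigning in each branch
                case "upper" -> "std_upper:" + value;
                case "lower" -> "std_lower:" + value;
                default -> throw new IllegalArgumentException("unknown bound [" + which + "]");
            };
        }
    }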
import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesReference; @@ -19,8 +18,11 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.metrics.CounterMetric; +import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.threadpool.ThreadPool; import java.io.EOFException; @@ -44,49 +46,17 @@ final class TransportHandshaker { * ignores the body of the request. After the handshake, the OutboundHandler uses the min(local,remote) protocol version for all later * messages. * - * This version supports three handshake protocols, v6080099, v7170099 and v8800000, which respectively have the same message structure - * as the transport protocols of v6.8.0, v7.17.0, and v8.18.0. This node only sends v7170099 requests, but it can send a valid response - * to any v6080099 or v8800000 requests that it receives. + * This version supports two handshake protocols, v7170099 and v8800000, which respectively have the same message structure as the + * transport protocols of v7.17.0 and v8.18.0. This node only sends v8800000 requests, but it can send a valid response to any v7170099 + * request that it receives. * * Note that these are not really TransportVersion constants as used elsewhere in ES, they're independent things that just happen to be - * stored in the same location in the message header and which roughly match the same ID numbering scheme. Older versions of ES did - * rely on them matching the real transport protocol (which itself matched the release version numbers), but these days that's no longer + * stored in the same location in the message header and which roughly match the same ID numbering scheme. Older versions of ES did rely + * on them matching the real transport protocol (which itself matched the release version numbers), but these days that's no longer * true. * * Here are some example messages, broken down to show their structure. See TransportHandshakerRawMessageTests for supporting tests.
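Before the example messages, the negotiation rule the comment describes can be restated compactly: each side advertises a version during the handshake, and all later traffic uses the minimum of the two, provided that an older remote's wire format is one we recognise. A toy, self-contained model of that rule with hypothetical integer constants (deliberately not the real TransportVersion API):

    import java.util.Set;

    final class HandshakeNegotiationSketch {
        static final int V7170099 = 7_17_00_99;  // v7.17.0-era wire format
        static final int V8800000 = 8_800_00_0;  // v8.18.0-era wire format
        static final Set<Integer> KNOWN = Set.of(V7170099, V8800000);

        // A newer remote adapts to us; an older remote is usable only if we know
        // how to speak its wire format.
        static int negotiate(int local, int remote) {
            if (remote >= local || KNOWN.contains(remote)) {
                return Math.min(local, remote);
            }
            throw new IllegalStateException("incompatible handshake version: " + remote);
        }

        public static void main(String[] args) {
            System.out.println(negotiate(V8800000, V7170099)); // prints 7170099
        }
    }

This is the shape of the ensureCompatibleVersion check introduced further down, minus the isCompatible floor and the warning log.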
* - * ## v6080099 Request: - * - * 45 53 -- 'ES' marker - * 00 00 00 34 -- total message length - * 00 00 00 00 00 00 00 01 -- request ID - * 08 -- status flags (0b1000 == handshake request) - * 00 5c c6 63 -- handshake protocol version (0x5cc663 == 6080099) - * 00 -- no request headers [1] - * 00 -- no response headers [1] - * 01 -- one feature [2] - * 06 -- feature name length - * 78 2d 70 61 63 6b -- feature name 'x-pack' - * 16 -- action string size - * 69 6e 74 65 72 6e 61 6c } - * 3a 74 63 70 2f 68 61 6e }- ASCII representation of HANDSHAKE_ACTION_NAME - * 64 73 68 61 6b 65 } - * 00 -- no parent task ID [3] - * 04 -- payload length - * 8b d5 b5 03 -- max acceptable protocol version (vInt: 00000011 10110101 11010101 10001011 == 7170699) - * - * ## v6080099 Response: - * - * 45 53 -- 'ES' marker - * 00 00 00 13 -- total message length - * 00 00 00 00 00 00 00 01 -- request ID (copied from request) - * 09 -- status flags (0b1001 == handshake response) - * 00 5c c6 63 -- handshake protocol version (0x5cc663 == 6080099, copied from request) - * 00 -- no request headers [1] - * 00 -- no response headers [1] - * c3 f9 eb 03 -- max acceptable protocol version (vInt: 00000011 11101011 11111001 11000011 == 8060099) - * - * - * ## v7170099 Requests: * * 45 53 -- 'ES' marker @@ -158,14 +128,11 @@ final class TransportHandshaker { * [3] Parent task ID should be empty; see org.elasticsearch.tasks.TaskId.writeTo for its structure. */ - static final TransportVersion V7_HANDSHAKE_VERSION = TransportVersion.fromId(6_08_00_99); + private static final Logger logger = LogManager.getLogger(TransportHandshaker.class); + static final TransportVersion V8_HANDSHAKE_VERSION = TransportVersion.fromId(7_17_00_99); static final TransportVersion V9_HANDSHAKE_VERSION = TransportVersion.fromId(8_800_00_0); - static final Set<TransportVersion> ALLOWED_HANDSHAKE_VERSIONS = Set.of( - V7_HANDSHAKE_VERSION, - V8_HANDSHAKE_VERSION, - V9_HANDSHAKE_VERSION - ); + static final Set<TransportVersion> ALLOWED_HANDSHAKE_VERSIONS = Set.of(V8_HANDSHAKE_VERSION, V9_HANDSHAKE_VERSION); static final String HANDSHAKE_ACTION_NAME = "internal:tcp/handshake"; private final ConcurrentMap<Long, HandshakeResponseHandler> pendingHandshakes = new ConcurrentHashMap<>(); @@ -196,14 +163,14 @@ final class TransportHandshaker { ActionListener<TransportVersion> listener ) { numHandshakes.inc(); - final HandshakeResponseHandler handler = new HandshakeResponseHandler(requestId, listener); + final HandshakeResponseHandler handler = new HandshakeResponseHandler(requestId, channel, listener); pendingHandshakes.put(requestId, handler); channel.addCloseListener( ActionListener.running(() -> handler.handleLocalException(new TransportException("handshake failed because connection reset"))) ); boolean success = false; try { - handshakeRequestSender.sendRequest(node, channel, requestId, V8_HANDSHAKE_VERSION); + handshakeRequestSender.sendRequest(node, channel, requestId, V9_HANDSHAKE_VERSION); threadPool.schedule( () -> handler.handleLocalException(new ConnectTransportException(node, "handshake_timeout[" + timeout + "]")), @@ -222,9 +189,9 @@ final class TransportHandshaker { } void handleHandshake(TransportChannel channel, long requestId, StreamInput stream) throws IOException { + final HandshakeRequest handshakeRequest; try { - // Must read the handshake request to exhaust the stream - new HandshakeRequest(stream); + handshakeRequest = new HandshakeRequest(stream); } catch (Exception e) { assert ignoreDeserializationErrors : e; throw e; } @@ -243,9 +210,44 @@ final class TransportHandshaker { assert ignoreDeserializationErrors : exception;
throw exception; } + ensureCompatibleVersion(version, handshakeRequest.transportVersion, handshakeRequest.releaseVersion, channel); channel.sendResponse(new HandshakeResponse(this.version, Build.current().version())); } + static void ensureCompatibleVersion( + TransportVersion localTransportVersion, + TransportVersion remoteTransportVersion, + String releaseVersion, + Object channel + ) { + if (TransportVersion.isCompatible(remoteTransportVersion)) { + if (remoteTransportVersion.onOrAfter(localTransportVersion)) { + // Remote is newer than us, so we will be using our transport protocol and it's up to the other end to decide whether it + // knows how to do that. + return; + } + if (remoteTransportVersion.isKnown()) { + // Remote is older than us, so we will be using its transport protocol, which we can do if and only if its protocol + // version is known to us. + return; + } + } + + final var message = Strings.format( + """ + Rejecting unreadable transport handshake from remote node with version [%s/%s] received on [%s] since this node has \ + version [%s/%s] which has an incompatible wire format.""", + releaseVersion, + remoteTransportVersion, + channel, + Build.current().version(), + localTransportVersion + ); + logger.warn(message); + throw new IllegalStateException(message); + + } + TransportResponseHandler<HandshakeResponse> removeHandlerForHandshake(long requestId) { return pendingHandshakes.remove(requestId); } @@ -261,11 +263,13 @@ final class TransportHandshaker { private class HandshakeResponseHandler implements TransportResponseHandler<HandshakeResponse> { private final long requestId; + private final TcpChannel channel; private final ActionListener<TransportVersion> listener; private final AtomicBoolean isDone = new AtomicBoolean(false); - private HandshakeResponseHandler(long requestId, ActionListener<TransportVersion> listener) { + private HandshakeResponseHandler(long requestId, TcpChannel channel, ActionListener<TransportVersion> listener) { this.requestId = requestId; + this.channel = channel; this.listener = listener; } @@ -282,20 +286,13 @@ final class TransportHandshaker { @Override public void handleResponse(HandshakeResponse response) { if (isDone.compareAndSet(false, true)) { - TransportVersion responseVersion = response.transportVersion; - if (TransportVersion.isCompatible(responseVersion) == false) { - listener.onFailure( - new IllegalStateException( - "Received message from unsupported version: [" - + responseVersion - + "] minimal compatible version is: [" - + TransportVersions.MINIMUM_COMPATIBLE - + "]" - ) - ); - } else { - listener.onResponse(TransportVersion.min(TransportHandshaker.this.version, response.getTransportVersion())); - } + ActionListener.completeWith(listener, () -> { + ensureCompatibleVersion(version, response.getTransportVersion(), response.getReleaseVersion(), channel); + final var resultVersion = TransportVersion.min(TransportHandshaker.this.version, response.getTransportVersion()); + assert TransportVersion.current().before(version) // simulating a newer-version transport service for test purposes + || resultVersion.isKnown() : "negotiated unknown version " + resultVersion; + return resultVersion; + }); } } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportStats.java b/server/src/main/java/org/elasticsearch/transport/TransportStats.java index 46b161b01e9f..1163cfbcb270 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportStats.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportStats.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.network.HandlingTimeTracker; import
org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -70,18 +69,16 @@ public class TransportStats implements Writeable, ChunkedToXContent { rxSize = in.readVLong(); txCount = in.readVLong(); txSize = in.readVLong(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0) && in.readBoolean()) { - inboundHandlingTimeBucketFrequencies = new long[HandlingTimeTracker.BUCKET_COUNT]; - for (int i = 0; i < inboundHandlingTimeBucketFrequencies.length; i++) { - inboundHandlingTimeBucketFrequencies[i] = in.readVLong(); - } - outboundHandlingTimeBucketFrequencies = new long[HandlingTimeTracker.BUCKET_COUNT]; - for (int i = 0; i < inboundHandlingTimeBucketFrequencies.length; i++) { - outboundHandlingTimeBucketFrequencies[i] = in.readVLong(); - } - } else { - inboundHandlingTimeBucketFrequencies = new long[0]; - outboundHandlingTimeBucketFrequencies = new long[0]; + if (in.getTransportVersion().before(TransportVersions.TRANSPORT_STATS_HANDLING_TIME_REQUIRED)) { + in.readBoolean(); + } + inboundHandlingTimeBucketFrequencies = new long[HandlingTimeTracker.BUCKET_COUNT]; + for (int i = 0; i < inboundHandlingTimeBucketFrequencies.length; i++) { + inboundHandlingTimeBucketFrequencies[i] = in.readVLong(); + } + outboundHandlingTimeBucketFrequencies = new long[HandlingTimeTracker.BUCKET_COUNT]; + for (int i = 0; i < inboundHandlingTimeBucketFrequencies.length; i++) { + outboundHandlingTimeBucketFrequencies[i] = in.readVLong(); } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { transportActionStats = Collections.unmodifiableMap(in.readOrderedMap(StreamInput::readString, TransportActionStats::new)); @@ -99,15 +96,16 @@ public class TransportStats implements Writeable, ChunkedToXContent { out.writeVLong(rxSize); out.writeVLong(txCount); out.writeVLong(txSize); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { - assert (inboundHandlingTimeBucketFrequencies.length > 0) == (outboundHandlingTimeBucketFrequencies.length > 0); - out.writeBoolean(inboundHandlingTimeBucketFrequencies.length > 0); - for (long handlingTimeBucketFrequency : inboundHandlingTimeBucketFrequencies) { - out.writeVLong(handlingTimeBucketFrequency); - } - for (long handlingTimeBucketFrequency : outboundHandlingTimeBucketFrequencies) { - out.writeVLong(handlingTimeBucketFrequency); - } + assert inboundHandlingTimeBucketFrequencies.length == HandlingTimeTracker.BUCKET_COUNT; + assert outboundHandlingTimeBucketFrequencies.length == HandlingTimeTracker.BUCKET_COUNT; + if (out.getTransportVersion().before(TransportVersions.TRANSPORT_STATS_HANDLING_TIME_REQUIRED)) { + out.writeBoolean(true); + } + for (long handlingTimeBucketFrequency : inboundHandlingTimeBucketFrequencies) { + out.writeVLong(handlingTimeBucketFrequency); + } + for (long handlingTimeBucketFrequency : outboundHandlingTimeBucketFrequencies) { + out.writeVLong(handlingTimeBucketFrequency); } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { out.writeMap(transportActionStats, StreamOutput::writeWriteable); @@ -166,24 +164,13 @@ public class TransportStats implements Writeable, ChunkedToXContent { return transportActionStats; } - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) - // Review and simplify the if-else blocks containing this symbol once v9 is released - 
private static final boolean IMPOSSIBLE_IN_V9 = true; - private boolean assertHistogramsConsistent() { assert inboundHandlingTimeBucketFrequencies.length == outboundHandlingTimeBucketFrequencies.length; - if (inboundHandlingTimeBucketFrequencies.length == 0) { - // Stats came from before v8.1 - assert IMPOSSIBLE_IN_V9; - } else { - assert inboundHandlingTimeBucketFrequencies.length == HandlingTimeTracker.BUCKET_COUNT; - } + assert inboundHandlingTimeBucketFrequencies.length == HandlingTimeTracker.BUCKET_COUNT; return true; } @Override - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) - // review the "if" blocks checking for non-empty once we have public Iterator toXContentChunked(ToXContent.Params outerParams) { return Iterators.concat(Iterators.single((builder, params) -> { builder.startObject(Fields.TRANSPORT); @@ -193,19 +180,10 @@ public class TransportStats implements Writeable, ChunkedToXContent { builder.humanReadableField(Fields.RX_SIZE_IN_BYTES, Fields.RX_SIZE, ByteSizeValue.ofBytes(rxSize)); builder.field(Fields.TX_COUNT, txCount); builder.humanReadableField(Fields.TX_SIZE_IN_BYTES, Fields.TX_SIZE, ByteSizeValue.ofBytes(txSize)); - if (inboundHandlingTimeBucketFrequencies.length > 0) { - histogramToXContent(builder, inboundHandlingTimeBucketFrequencies, Fields.INBOUND_HANDLING_TIME_HISTOGRAM); - histogramToXContent(builder, outboundHandlingTimeBucketFrequencies, Fields.OUTBOUND_HANDLING_TIME_HISTOGRAM); - } else { - // Stats came from before v8.1 - assert IMPOSSIBLE_IN_V9; - } - if (transportActionStats.isEmpty() == false) { - builder.startObject(Fields.ACTIONS); - } else { - // Stats came from before v8.8 - assert IMPOSSIBLE_IN_V9; - } + assert inboundHandlingTimeBucketFrequencies.length > 0; + histogramToXContent(builder, inboundHandlingTimeBucketFrequencies, Fields.INBOUND_HANDLING_TIME_HISTOGRAM); + histogramToXContent(builder, outboundHandlingTimeBucketFrequencies, Fields.OUTBOUND_HANDLING_TIME_HISTOGRAM); + builder.startObject(Fields.ACTIONS); return builder; }), @@ -215,12 +193,7 @@ public class TransportStats implements Writeable, ChunkedToXContent { return builder; }), - Iterators.single((builder, params) -> { - if (transportActionStats.isEmpty() == false) { - builder.endObject(); - } - return builder.endObject(); - }) + Iterators.single((builder, params) -> { return builder.endObject().endObject(); }) ); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java index 13479d188536..d3e5c1c7268f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.get; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -31,14 +30,10 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Set; -import static org.hamcrest.CoreMatchers.containsString; - public class GetSnapshotsResponseTests extends ESTestCase { // We can not subclass 
AbstractSerializingTestCase because it // can only be used for instances with equals and hashCode @@ -60,12 +55,6 @@ public class GetSnapshotsResponseTests extends ESTestCase { private void assertEqualInstances(GetSnapshotsResponse expectedInstance, GetSnapshotsResponse newInstance) { assertEquals(expectedInstance.getSnapshots(), newInstance.getSnapshots()); assertEquals(expectedInstance.next(), newInstance.next()); - assertEquals(expectedInstance.getFailures().keySet(), newInstance.getFailures().keySet()); - for (Map.Entry expectedEntry : expectedInstance.getFailures().entrySet()) { - ElasticsearchException expectedException = expectedEntry.getValue(); - ElasticsearchException newException = newInstance.getFailures().get(expectedEntry.getKey()); - assertThat(newException.getMessage(), containsString(expectedException.getMessage())); - } } private List createSnapshotInfos(String repoName) { @@ -99,7 +88,6 @@ public class GetSnapshotsResponseTests extends ESTestCase { private GetSnapshotsResponse createTestInstance() { Set repositories = new HashSet<>(); - Map failures = new HashMap<>(); List responses = new ArrayList<>(); for (int i = 0; i < randomIntBetween(0, 5); i++) { @@ -111,12 +99,10 @@ public class GetSnapshotsResponseTests extends ESTestCase { for (int i = 0; i < randomIntBetween(0, 5); i++) { String repository = randomValueOtherThanMany(repositories::contains, () -> randomAlphaOfLength(10)); repositories.add(repository); - failures.put(repository, new ElasticsearchException(randomAlphaOfLength(10))); } return new GetSnapshotsResponse( responses, - failures, randomBoolean() ? Base64.getUrlEncoder() .encodeToString( diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageActionTests.java index e1214355bd41..658637c4d379 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageActionTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.project.TestProjectResolvers; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.PlainShardIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; @@ -306,7 +305,7 @@ public class TransportAnalyzeIndexDiskUsageActionTests extends ESTestCase { } ) { @Override - protected GroupShardsIterator shards( + protected List shards( ClusterState clusterState, AnalyzeIndexDiskUsageRequest request, String[] concreteIndices @@ -315,7 +314,7 @@ public class TransportAnalyzeIndexDiskUsageActionTests extends ESTestCase { for (Map.Entry> e : targetShards.entrySet()) { shardIterators.add(new PlainShardIterator(e.getKey(), e.getValue())); } - return new GroupShardsIterator<>(shardIterators); + return shardIterators; } }; } diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index 47ff4ca6f060..11085558dbe1 100644 --- 
a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.UUIDs; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; @@ -83,9 +82,7 @@ public class AbstractSearchAsyncActionTests extends ESTestCase { null, request, listener, - new GroupShardsIterator<>( - Collections.singletonList(new SearchShardIterator(null, new ShardId("index", "_na", 0), Collections.emptyList(), null)) - ), + Collections.singletonList(new SearchShardIterator(null, new ShardId("index", "_na", 0), Collections.emptyList(), null)), timeProvider, ClusterState.EMPTY_STATE, null, diff --git a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java index 0c18f446a57e..a60ab295cd3e 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.action.search; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.CanMatchNodeResponse.ResponseOrFailure; @@ -22,7 +23,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; @@ -135,9 +135,9 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { } }; - AtomicReference> result = new AtomicReference<>(); + AtomicReference> result = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), 2, @@ -232,9 +232,9 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { } }; - AtomicReference> result = new AtomicReference<>(); + AtomicReference> result = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), 2, @@ -325,9 +325,9 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { } }; - AtomicReference> result = new AtomicReference<>(); + AtomicReference> result = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "logs", new OriginalIndices(new String[] { "logs" }, SearchRequest.DEFAULT_INDICES_OPTIONS), randomIntBetween(2, 20), @@ -427,9 +427,9 @@ public class 
CanMatchPreFilterSearchPhaseTests extends ESTestCase { } }; - AtomicReference> result = new AtomicReference<>(); + AtomicReference> result = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "logs", new OriginalIndices(new String[] { "logs" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -1202,7 +1202,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { false, new ActionListener<>() { @Override - public void onResponse(GroupShardsIterator searchShardIterators) { + public void onResponse(List searchShardIterators) { fail(null, "unexpected success with result [%s] while expecting to handle failure with [%s]", searchShardIterators); latch.countDown(); } @@ -1268,7 +1268,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { boolean allowPartialResults, BiConsumer, List> canMatchResultsConsumer ) throws Exception { - AtomicReference> result = new AtomicReference<>(); + AtomicReference> result = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); Tuple> canMatchAndShardRequests = getCanMatchPhaseAndRequests( dataStreams, @@ -1305,7 +1305,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { SuggestBuilder suggest, List unassignedIndices, boolean allowPartialResults, - ActionListener> canMatchActionListener + ActionListener> canMatchActionListener ) { Map lookup = new ConcurrentHashMap<>(); DiscoveryNode primaryNode = DiscoveryNodeUtils.create("node_1"); @@ -1324,7 +1324,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { String[] indices = indicesToSearch.toArray(new String[0]); OriginalIndices originalIndices = new OriginalIndices(indices, SearchRequest.DEFAULT_INDICES_OPTIONS); - final List originalShardIters = new ArrayList<>(); + final List shardIters = new ArrayList<>(); for (var dataStream : dataStreams) { boolean atLeastOnePrimaryAssigned = false; for (var dataStreamIndex : dataStream.getIndices()) { @@ -1333,9 +1333,9 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { boolean withAssignedPrimaries = randomBoolean() || atLeastOnePrimaryAssigned == false; int numShards = randomIntBetween(1, 6); if (unassignedIndices.contains(dataStreamIndex)) { - originalShardIters.addAll(getShardsIter(dataStreamIndex, originalIndices, numShards, false, null, null)); + shardIters.addAll(getShardsIter(dataStreamIndex, originalIndices, numShards, false, null, null)); } else { - originalShardIters.addAll( + shardIters.addAll( getShardsIter(dataStreamIndex, originalIndices, numShards, false, withAssignedPrimaries ? 
primaryNode : null, null) ); atLeastOnePrimaryAssigned |= withAssignedPrimaries; @@ -1345,14 +1345,14 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { for (Index regularIndex : regularIndices) { if (unassignedIndices.contains(regularIndex)) { - originalShardIters.addAll(getShardsIter(regularIndex, originalIndices, randomIntBetween(1, 6), false, null, null)); + shardIters.addAll(getShardsIter(regularIndex, originalIndices, randomIntBetween(1, 6), false, null, null)); } else { - originalShardIters.addAll( + shardIters.addAll( getShardsIter(regularIndex, originalIndices, randomIntBetween(1, 6), randomBoolean(), primaryNode, replicaNode) ); } } - GroupShardsIterator shardsIter = GroupShardsIterator.sortAndCreate(originalShardIters); + CollectionUtil.timSort(shardIters); final SearchRequest searchRequest = new SearchRequest(); searchRequest.indices(indices); @@ -1415,7 +1415,6 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { System::nanoTime ); - AtomicReference> result = new AtomicReference<>(); return new Tuple<>( new CanMatchPreFilterSearchPhase( logger, @@ -1425,7 +1424,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { Collections.emptyMap(), threadPool.executor(ThreadPool.Names.SEARCH_COORDINATION), searchRequest, - shardsIter, + shardIters, timeProvider, null, true, diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 65fdec96c92f..5f8473959951 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -144,7 +144,6 @@ public class ExpandSearchPhaseTests extends ESTestCase { hits.decRef(); } } finally { - mockSearchPhaseContext.execute(() -> {}); var resp = mockSearchPhaseContext.searchResponse.get(); if (resp != null) { resp.decRef(); diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index e8e12300c23e..97d420b7cd3c 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -13,7 +13,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -62,7 +61,7 @@ public final class MockSearchPhaseContext extends AbstractSearchAsyncAction(List.of()), + List.of(), null, ClusterState.EMPTY_STATE, new SearchTask(0, "n/a", "n/a", () -> "test", null, Collections.emptyMap()), diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 7e9e6f623cab..647d16977181 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.cluster.ClusterState; 
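The GroupShardsIterator-to-List rewrite running through these tests replaces a wrapper that sorted its contents on construction with a plain java.util.List that callers sort in place when ordering matters (the diff uses Lucene's CollectionUtil.timSort; Collections.sort stands in below so the sketch carries no Lucene dependency). A minimal sketch with a hypothetical ShardRef record in place of the real shard iterator types:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    record ShardRef(String index, int shardId) implements Comparable<ShardRef> {
        @Override
        public int compareTo(ShardRef other) {
            int cmp = index.compareTo(other.index);
            return cmp != 0 ? cmp : Integer.compare(shardId, other.shardId);
        }
    }

    final class WrapperRemovalSketch {
        // was (in spirit): GroupShardsIterator.sortAndCreate(merged)
        static List<ShardRef> mergeAndSort(List<ShardRef> local, List<ShardRef> remote) {
            List<ShardRef> merged = new ArrayList<>(local);
            merged.addAll(remote);
            Collections.sort(merged);
            return merged;
        }
    }

Since the wrapper was little more than a delegating view over an underlying list, dropping it removes an allocation and an indirection without changing iteration order.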
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; @@ -67,7 +66,7 @@ public class SearchAsyncActionTests extends ESTestCase { DiscoveryNode replicaNode = DiscoveryNodeUtils.create("node_2"); AtomicInteger contextIdGenerator = new AtomicInteger(0); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -182,7 +181,7 @@ public class SearchAsyncActionTests extends ESTestCase { DiscoveryNode replicaNode = DiscoveryNodeUtils.create("node_1"); AtomicInteger contextIdGenerator = new AtomicInteger(0); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -285,7 +284,7 @@ public class SearchAsyncActionTests extends ESTestCase { Map> nodeToContextMap = newConcurrentMap(); AtomicInteger contextIdGenerator = new AtomicInteger(0); int numShards = randomIntBetween(1, 10); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -415,7 +414,7 @@ public class SearchAsyncActionTests extends ESTestCase { Map> nodeToContextMap = newConcurrentMap(); AtomicInteger contextIdGenerator = new AtomicInteger(0); int numShards = randomIntBetween(2, 10); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -534,7 +533,7 @@ public class SearchAsyncActionTests extends ESTestCase { DiscoveryNode replicaNode = DiscoveryNodeUtils.create("node_1"); AtomicInteger contextIdGenerator = new AtomicInteger(0); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -647,7 +646,7 @@ public class SearchAsyncActionTests extends ESTestCase { searchShardIterator.reset(); searchShardIterators.add(searchShardIterator); } - GroupShardsIterator shardsIter = new GroupShardsIterator<>(searchShardIterators); + List shardsIter = searchShardIterators; Map lookup = Map.of(primaryNode.getId(), new MockConnection(primaryNode)); CountDownLatch latch = new CountDownLatch(1); @@ -706,7 +705,7 @@ public class SearchAsyncActionTests extends ESTestCase { assertThat(searchResponse.get().getSuccessfulShards(), equalTo(shardsIter.size())); } - static GroupShardsIterator getShardsIter( + static List getShardsIter( String index, OriginalIndices originalIndices, int numShards, @@ -714,9 +713,7 @@ public class SearchAsyncActionTests extends ESTestCase { DiscoveryNode primaryNode, DiscoveryNode replicaNode ) { - return new GroupShardsIterator<>( - getShardsIter(new Index(index, "_na_"), originalIndices, numShards, doReplicas, primaryNode, replicaNode) - ); + return getShardsIter(new Index(index, "_na_"), originalIndices, numShards, doReplicas, primaryNode, replicaNode); } static List getShardsIter( diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java 
b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index 661a9fd8c854..be693a2d7d29 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; @@ -42,6 +41,7 @@ import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.transport.Transport; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; @@ -150,7 +150,7 @@ public class SearchQueryThenFetchAsyncActionTests extends ESTestCase { } }; CountDownLatch latch = new CountDownLatch(1); - GroupShardsIterator shardsIter = SearchAsyncActionTests.getShardsIter( + List shardsIter = SearchAsyncActionTests.getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java index 2ecdc1be9c36..79736427f634 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java @@ -12,7 +12,8 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.OriginalIndicesTests; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.routing.GroupShardsIteratorTests; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; @@ -24,10 +25,24 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.hamcrest.Matchers.equalTo; public class SearchShardIteratorTests extends ESTestCase { + public static List randomShardRoutings(ShardId shardId) { + return randomShardRoutings(shardId, randomIntBetween(0, 2)); + } + + private static List randomShardRoutings(ShardId shardId, int numReplicas) { + List shardRoutings = new ArrayList<>(); + shardRoutings.add(TestShardRouting.newShardRouting(shardId, randomAlphaOfLengthBetween(5, 10), true, STARTED)); + for (int j = 0; j < numReplicas; j++) { + shardRoutings.add(TestShardRouting.newShardRouting(shardId, randomAlphaOfLengthBetween(5, 10), false, STARTED)); + } + return shardRoutings; + } + public void testShardId() { ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomInt()); SearchShardIterator searchShardIterator = new SearchShardIterator(null, shardId, Collections.emptyList(), OriginalIndices.NONE); @@ -149,19 +164,14 @@ public class 
SearchShardIteratorTests extends ESTestCase { for (String uuid : uuids) { ShardId shardId = new ShardId(index, uuid, i); shardIterators.add( - new SearchShardIterator( - null, - shardId, - GroupShardsIteratorTests.randomShardRoutings(shardId), - OriginalIndicesTests.randomOriginalIndices() - ) + new SearchShardIterator(null, shardId, randomShardRoutings(shardId), OriginalIndicesTests.randomOriginalIndices()) ); for (String cluster : clusters) { shardIterators.add( new SearchShardIterator( cluster, shardId, - GroupShardsIteratorTests.randomShardRoutings(shardId), + randomShardRoutings(shardId), OriginalIndicesTests.randomOriginalIndices() ) ); @@ -207,11 +217,6 @@ public class SearchShardIteratorTests extends ESTestCase { private static SearchShardIterator randomSearchShardIterator() { String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10); ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomIntBetween(0, Integer.MAX_VALUE)); - return new SearchShardIterator( - clusterAlias, - shardId, - GroupShardsIteratorTests.randomShardRoutings(shardId), - OriginalIndicesTests.randomOriginalIndices() - ); + return new SearchShardIterator(clusterAlias, shardId, randomShardRoutings(shardId), OriginalIndicesTests.randomOriginalIndices()); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index cb6585db91bd..7ab9b8611b8c 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -40,8 +40,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.VersionInformation; import org.elasticsearch.cluster.project.TestProjectResolvers; -import org.elasticsearch.cluster.routing.GroupShardsIterator; -import org.elasticsearch.cluster.routing.GroupShardsIteratorTests; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; @@ -154,7 +152,7 @@ public class TransportSearchActionTests extends ESTestCase { String clusterAlias ) { ShardId shardId = new ShardId(index, id); - List shardRoutings = GroupShardsIteratorTests.randomShardRoutings(shardId); + List shardRoutings = SearchShardIteratorTests.randomShardRoutings(shardId); return new SearchShardIterator(clusterAlias, shardId, shardRoutings, originalIndices); } @@ -253,7 +251,7 @@ public class TransportSearchActionTests extends ESTestCase { Collections.shuffle(localShardIterators, random()); Collections.shuffle(remoteShardIterators, random()); - GroupShardsIterator groupShardsIterator = TransportSearchAction.mergeShardsIterators( + List groupShardsIterator = TransportSearchAction.mergeShardsIterators( localShardIterators, remoteShardIterators ); diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java index d81c6c860831..3200bbc80a42 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java 
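With GroupShardsIteratorTests deleted below, its randomShardRoutings fixture now lives in SearchShardIteratorTests, which PlainShardIteratorTests and TransportSearchActionTests import instead. The fixture's shape, as a self-contained sketch with a hypothetical Routing record standing in for ShardRouting:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.ThreadLocalRandom;

    record Routing(int shardId, String nodeId, boolean primary) {}

    final class RoutingFixtureSketch {
        // one STARTED primary plus 0..2 STARTED replicas, mirroring the helper above
        static List<Routing> randomShardRoutings(int shardId) {
            ThreadLocalRandom random = ThreadLocalRandom.current();
            List<Routing> routings = new ArrayList<>();
            routings.add(new Routing(shardId, "node-" + random.nextInt(10), true));
            int replicas = random.nextInt(3);
            for (int i = 0; i < replicas; i++) {
                routings.add(new Routing(shardId, "node-" + random.nextInt(10), false));
            }
            return routings;
        }
    }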
@@ -21,7 +21,6 @@ import org.elasticsearch.cluster.action.shard.ShardStateAction.FailedShardUpdate import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingNodesHelper; import org.elasticsearch.cluster.routing.RoutingTable; @@ -243,7 +242,7 @@ public class ShardFailedClusterStateTaskExecutorTests extends ESAllocationTestCa private List createExistingShards(ClusterState currentState, String reason) { List shards = new ArrayList<>(); - GroupShardsIterator shardGroups = currentState.routingTable().allAssignedShardsGrouped(new String[] { INDEX }, true); + List shardGroups = currentState.routingTable().allAssignedShardsGrouped(new String[] { INDEX }, true); for (ShardIterator shardIt : shardGroups) { for (ShardRouting shard : shardIt) { shards.add(shard); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java deleted file mode 100644 index 8e111c367628..000000000000 --- a/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.cluster.routing; - -import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.action.OriginalIndicesTests; -import org.elasticsearch.action.search.SearchShardIterator; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.test.ESTestCase; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; - -public class GroupShardsIteratorTests extends ESTestCase { - - public static List randomShardRoutings(ShardId shardId) { - return randomShardRoutings(shardId, randomIntBetween(0, 2)); - } - - private static List randomShardRoutings(ShardId shardId, int numReplicas) { - List shardRoutings = new ArrayList<>(); - shardRoutings.add(TestShardRouting.newShardRouting(shardId, randomAlphaOfLengthBetween(5, 10), true, STARTED)); - for (int j = 0; j < numReplicas; j++) { - shardRoutings.add(TestShardRouting.newShardRouting(shardId, randomAlphaOfLengthBetween(5, 10), false, STARTED)); - } - return shardRoutings; - } - - public void testSize() { - List list = new ArrayList<>(); - Index index = new Index("foo", "na"); - { - ShardId shardId = new ShardId(index, 0); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId, 2))); - } - list.add(new PlainShardIterator(new ShardId(index, 1), Collections.emptyList())); - { - ShardId shardId = new ShardId(index, 2); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId, 0))); - } - index = new Index("foo_1", "na"); - { - ShardId shardId = new ShardId(index, 0); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId, 0))); - } - { - ShardId shardId = new ShardId(index, 1); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId, 0))); - } - GroupShardsIterator iter = new GroupShardsIterator<>(list); - assertEquals(7, iter.totalSizeWith1ForEmpty()); - assertEquals(5, iter.size()); - assertEquals(6, iter.totalSize()); - } - - public void testIterate() { - List list = new ArrayList<>(); - Index index = new Index("foo", "na"); - { - ShardId shardId = new ShardId(index, 0); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId))); - } - list.add(new PlainShardIterator(new ShardId(index, 1), Collections.emptyList())); - { - ShardId shardId = new ShardId(index, 2); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId))); - } - { - ShardId shardId = new ShardId(index, 0); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId))); - } - { - ShardId shardId = new ShardId(index, 1); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId))); - } - index = new Index("foo_2", "na"); - { - ShardId shardId = new ShardId(index, 0); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId))); - } - { - ShardId shardId = new ShardId(index, 1); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId))); - } - - Collections.shuffle(list, random()); - { - GroupShardsIterator unsorted = new GroupShardsIterator<>(list); - GroupShardsIterator iter = new GroupShardsIterator<>(list); - List actualIterators = new ArrayList<>(); - for (ShardIterator shardsIterator : iter) { - actualIterators.add(shardsIterator); - } - assertEquals(actualIterators, list); - } - { - GroupShardsIterator iter = GroupShardsIterator.sortAndCreate(list); - List actualIterators = new ArrayList<>(); - for (ShardIterator 
shardsIterator : iter) { - actualIterators.add(shardsIterator); - } - CollectionUtil.timSort(actualIterators); - assertEquals(actualIterators, list); - } - } - - public void testOrderingWithSearchShardIterators() { - String[] indices = generateRandomStringArray(10, 10, false, false); - Arrays.sort(indices); - String[] uuids = generateRandomStringArray(5, 10, false, false); - Arrays.sort(uuids); - String[] clusters = generateRandomStringArray(5, 10, false, false); - Arrays.sort(clusters); - - List sorted = new ArrayList<>(); - int numShards = randomIntBetween(1, 10); - for (int i = 0; i < numShards; i++) { - for (String index : indices) { - for (String uuid : uuids) { - ShardId shardId = new ShardId(index, uuid, i); - SearchShardIterator shardIterator = new SearchShardIterator( - null, - shardId, - GroupShardsIteratorTests.randomShardRoutings(shardId), - OriginalIndicesTests.randomOriginalIndices() - ); - sorted.add(shardIterator); - for (String cluster : clusters) { - SearchShardIterator remoteIterator = new SearchShardIterator( - cluster, - shardId, - GroupShardsIteratorTests.randomShardRoutings(shardId), - OriginalIndicesTests.randomOriginalIndices() - ); - sorted.add(remoteIterator); - } - } - } - } - - List shuffled = new ArrayList<>(sorted); - Collections.shuffle(shuffled, random()); - { - List actualIterators = new ArrayList<>(); - GroupShardsIterator iter = new GroupShardsIterator<>(shuffled); - for (SearchShardIterator searchShardIterator : iter) { - actualIterators.add(searchShardIterator); - } - assertEquals(shuffled, actualIterators); - } - { - List actualIterators = new ArrayList<>(); - GroupShardsIterator iter = GroupShardsIterator.sortAndCreate(shuffled); - for (SearchShardIterator searchShardIterator : iter) { - actualIterators.add(searchShardIterator); - } - assertEquals(sorted, actualIterators); - } - } -} diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java index 60f86d62d523..2f90a72ce12c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java @@ -173,12 +173,7 @@ public class OperationRoutingTests extends ESTestCase { for (int i = 0; i < numRepeatedSearches; i++) { List searchedShards = new ArrayList<>(numShards); Set selectedNodes = Sets.newHashSetWithExpectedSize(numShards); - final GroupShardsIterator groupIterator = opRouting.searchShards( - state.projectState(projectId), - indexNames, - null, - sessionKey - ); + final List groupIterator = opRouting.searchShards(state.projectState(projectId), indexNames, null, sessionKey); assertThat("One group per index shard", groupIterator.size(), equalTo(numIndices * numShards)); for (ShardIterator shardIterator : groupIterator) { @@ -305,14 +300,7 @@ public class OperationRoutingTests extends ESTestCase { TestThreadPool threadPool = new TestThreadPool("test"); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); ResponseCollectorService collector = new ResponseCollectorService(clusterService); - GroupShardsIterator groupIterator = opRouting.searchShards( - project, - indexNames, - null, - null, - collector, - new HashMap<>() - ); + List groupIterator = opRouting.searchShards(project, indexNames, null, null, collector, new HashMap<>()); assertThat("One group per index shard", groupIterator.size(), equalTo(numIndices * numShards)); @@ -393,14 +381,7 
@@ public class OperationRoutingTests extends ESTestCase { ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); ResponseCollectorService collector = new ResponseCollectorService(clusterService); - GroupShardsIterator groupIterator = opRouting.searchShards( - project, - indexNames, - null, - null, - collector, - new HashMap<>() - ); + List groupIterator = opRouting.searchShards(project, indexNames, null, null, collector, new HashMap<>()); assertThat("One group per index shard", groupIterator.size(), equalTo(numIndices * numShards)); // We have two nodes, where the second has more load @@ -461,14 +442,7 @@ public class OperationRoutingTests extends ESTestCase { Map outstandingRequests = new HashMap<>(); // Check that we choose to search over both nodes - GroupShardsIterator groupIterator = opRouting.searchShards( - project, - indexNames, - null, - null, - collector, - outstandingRequests - ); + List groupIterator = opRouting.searchShards(project, indexNames, null, null, collector, outstandingRequests); Set nodeIds = new HashSet<>(); nodeIds.add(groupIterator.get(0).nextOrNull().currentNodeId()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/PlainShardIteratorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/PlainShardIteratorTests.java index 0d6cc3fad1e5..a7d3ed95320c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/PlainShardIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/PlainShardIteratorTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing; +import org.elasticsearch.action.search.SearchShardIteratorTests; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; @@ -55,7 +56,7 @@ public class PlainShardIteratorTests extends ESTestCase { for (String index : indices) { for (String uuid : uuids) { ShardId shardId = new ShardId(index, uuid, i); - shardIterators.add(new PlainShardIterator(shardId, GroupShardsIteratorTests.randomShardRoutings(shardId))); + shardIterators.add(new PlainShardIterator(shardId, SearchShardIteratorTests.randomShardRoutings(shardId))); } } } @@ -86,6 +87,6 @@ public class PlainShardIteratorTests extends ESTestCase { private static PlainShardIterator randomPlainShardIterator() { ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomIntBetween(1, Integer.MAX_VALUE)); - return new PlainShardIterator(shardId, GroupShardsIteratorTests.randomShardRoutings(shardId)); + return new PlainShardIterator(shardId, SearchShardIteratorTests.randomShardRoutings(shardId)); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/server/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java index 7f3d68e5dce2..7d3fa2d79126 100644 --- a/server/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GlobalRoutingTable; import org.elasticsearch.cluster.routing.GlobalRoutingTableTestHelper; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.PlainShardIterator; import 
org.elasticsearch.cluster.routing.RotationShardShuffler; @@ -387,12 +386,7 @@ public class RoutingIteratorTests extends ESAllocationTestCase { new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) ); - GroupShardsIterator shardIterators = operationRouting.searchShards( - project, - new String[] { "test" }, - null, - "_shards:0" - ); + List shardIterators = operationRouting.searchShards(project, new String[] { "test" }, null, "_shards:0"); assertThat(shardIterators.size(), equalTo(1)); assertThat(shardIterators.iterator().next().shardId().id(), equalTo(0)); diff --git a/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java b/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java index 35e24a3f20b3..ef893db4ba18 100644 --- a/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java @@ -106,7 +106,7 @@ public class AbstractFileWatchingServiceTests extends ESTestCase { env = newEnvironment(Settings.EMPTY); - Files.createDirectories(env.configFile()); + Files.createDirectories(env.configDir()); watchedFile = getWatchedFilePath(env); updates = new ArrayBlockingQueue<>(5); @@ -206,7 +206,7 @@ public class AbstractFileWatchingServiceTests extends ESTestCase { } private static Path getWatchedFilePath(Environment env) { - return env.configFile().toAbsolutePath().resolve("test").resolve("test.json"); + return env.configDir().toAbsolutePath().resolve("test").resolve("test.json"); } } diff --git a/server/src/test/java/org/elasticsearch/common/settings/LocallyMountedSecretsTests.java b/server/src/test/java/org/elasticsearch/common/settings/LocallyMountedSecretsTests.java index 0dd1db64c144..0c2dc68a0146 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/LocallyMountedSecretsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/LocallyMountedSecretsTests.java @@ -97,7 +97,7 @@ public class LocallyMountedSecretsTests extends ESTestCase { } public void testProcessSettingsFile() throws Exception { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertTrue(secrets.isLoaded()); assertThat(secrets.getVersion(), equalTo(1L)); @@ -109,7 +109,7 @@ public class LocallyMountedSecretsTests extends ESTestCase { } public void testProcessDeprecatedSettingsFile() throws Exception { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSONDepricated); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSONDepricated); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertTrue(secrets.isLoaded()); assertThat(secrets.getVersion(), equalTo(1L)); @@ -119,7 +119,7 @@ public class LocallyMountedSecretsTests extends ESTestCase { } public void testDuplicateSettingKeys() throws Exception { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSONDuplicateKeys); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSONDuplicateKeys); Exception e = expectThrows(Exception.class, () -> new LocallyMountedSecrets(env)); assertThat(e, instanceOf(XContentParseException.class)); assertThat(e.getMessage(), containsString("failed to parse field")); @@ -134,7 +134,7 @@ public 
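// Shared setup in the LocallyMountedSecretsTests hunks above and below: write a
// secrets.json under the config directory's "secrets" subfolder, then load it.
// A condensed sketch of what writeTestFile plus the constructor amount to
// (testJSON is the fixture string defined by the test class):
Path secretsFile = env.configDir().resolve("secrets").resolve("secrets.json");
Files.createDirectories(secretsFile.getParent());
Files.writeString(secretsFile, testJSON);
LocallyMountedSecrets secrets = new LocallyMountedSecrets(env);
assertTrue(secrets.isLoaded());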
class LocallyMountedSecretsTests extends ESTestCase { } public void testSettingsGetFile() throws IOException, GeneralSecurityException { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertTrue(secrets.isLoaded()); assertThat(secrets.getSettingNames(), containsInAnyOrder("aaa", "ccc", "eee")); @@ -165,7 +165,7 @@ public class LocallyMountedSecretsTests extends ESTestCase { } public void testSettingsSHADigest() throws IOException, GeneralSecurityException { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertTrue(secrets.isLoaded()); assertThat(secrets.getSettingNames(), containsInAnyOrder("aaa", "ccc", "eee")); @@ -178,7 +178,7 @@ public class LocallyMountedSecretsTests extends ESTestCase { } public void testProcessBadSettingsFile() throws IOException { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), noMetadataJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), noMetadataJSON); assertThat( expectThrows(IllegalArgumentException.class, () -> new LocallyMountedSecrets(env)).getMessage(), containsString("Required [metadata]") @@ -186,7 +186,7 @@ public class LocallyMountedSecretsTests extends ESTestCase { } public void testSerializationWithSecrets() throws Exception { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); final BytesStreamOutput out = new BytesStreamOutput(); @@ -213,7 +213,7 @@ public class LocallyMountedSecretsTests extends ESTestCase { } public void testClose() throws IOException { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertEquals("bbb", secrets.getString("aaa").toString()); assertEquals("ddd", secrets.getString("ccc").toString()); diff --git a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java index 25ac11b516dc..834f53dc410d 100644 --- a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java @@ -34,20 +34,20 @@ public class EnvironmentTests extends ESTestCase { public void testRepositoryResolution() throws IOException { Environment environment = newEnvironment(); - assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue()); - assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue()); + assertThat(environment.resolveRepoDir("/test/repos/repo1"), nullValue()); + assertThat(environment.resolveRepoDir("test/repos/repo1"), nullValue()); environment = newEnvironment( Settings.builder() .putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other") .build() ); - assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue()); - assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue()); - 
assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue()); - assertThat(environment.resolveRepoFile("/test/repos/../repo1"), nullValue()); - assertThat(environment.resolveRepoFile("/test/repos/../repos/repo1"), notNullValue()); - assertThat(environment.resolveRepoFile("/somethingeles/repos/repo1"), nullValue()); - assertThat(environment.resolveRepoFile("/test/other/repo"), notNullValue()); + assertThat(environment.resolveRepoDir("/test/repos/repo1"), notNullValue()); + assertThat(environment.resolveRepoDir("test/repos/repo1"), notNullValue()); + assertThat(environment.resolveRepoDir("/another/repos/repo1"), notNullValue()); + assertThat(environment.resolveRepoDir("/test/repos/../repo1"), nullValue()); + assertThat(environment.resolveRepoDir("/test/repos/../repos/repo1"), notNullValue()); + assertThat(environment.resolveRepoDir("/somethingeles/repos/repo1"), nullValue()); + assertThat(environment.resolveRepoDir("/test/other/repo"), notNullValue()); assertThat(environment.resolveRepoURL(new URL("file:///test/repos/repo1")), notNullValue()); assertThat(environment.resolveRepoURL(new URL("file:/test/repos/repo1")), notNullValue()); @@ -66,7 +66,7 @@ public class EnvironmentTests extends ESTestCase { final Path pathHome = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", pathHome).build(); final Environment environment = new Environment(settings, null); - assertThat(environment.dataFiles(), equalTo(new Path[] { pathHome.resolve("data") })); + assertThat(environment.dataDirs(), equalTo(new Path[] { pathHome.resolve("data") })); } public void testPathDataNotSetInEnvironmentIfNotSet() { @@ -82,41 +82,41 @@ public class EnvironmentTests extends ESTestCase { .put("path.data", createTempDir().toAbsolutePath() + "," + createTempDir().toAbsolutePath()) .build(); final Environment environment = new Environment(settings, null); - assertThat(environment.dataFiles(), arrayWithSize(2)); + assertThat(environment.dataDirs(), arrayWithSize(2)); } public void testPathLogsWhenNotSet() { final Path pathHome = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", pathHome).build(); final Environment environment = new Environment(settings, null); - assertThat(environment.logsFile(), equalTo(pathHome.resolve("logs"))); + assertThat(environment.logsDir(), equalTo(pathHome.resolve("logs"))); } public void testDefaultConfigPath() { final Path path = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", path).build(); final Environment environment = new Environment(settings, null); - assertThat(environment.configFile(), equalTo(path.resolve("config"))); + assertThat(environment.configDir(), equalTo(path.resolve("config"))); } public void testConfigPath() { final Path configPath = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", createTempDir().toAbsolutePath()).build(); final Environment environment = new Environment(settings, configPath); - assertThat(environment.configFile(), equalTo(configPath)); + assertThat(environment.configDir(), equalTo(configPath)); } public void testConfigPathWhenNotSet() { final Path pathHome = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", pathHome).build(); final Environment environment = new Environment(settings, null); - assertThat(environment.configFile(), equalTo(pathHome.resolve("config"))); + assertThat(environment.configDir(), 
equalTo(pathHome.resolve("config"))); } public void testNonExistentTempPathValidation() { Settings build = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); Environment environment = new Environment(build, null, createTempDir().resolve("this_does_not_exist")); - FileNotFoundException e = expectThrows(FileNotFoundException.class, environment::validateTmpFile); + FileNotFoundException e = expectThrows(FileNotFoundException.class, environment::validateTmpDir); assertThat(e.getMessage(), startsWith("Temporary directory [")); assertThat(e.getMessage(), endsWith("this_does_not_exist] does not exist or is not accessible")); } @@ -124,7 +124,7 @@ public class EnvironmentTests extends ESTestCase { public void testTempPathValidationWhenRegularFile() throws IOException { Settings build = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); Environment environment = new Environment(build, null, createTempFile("something", ".test")); - IOException e = expectThrows(IOException.class, environment::validateTmpFile); + IOException e = expectThrows(IOException.class, environment::validateTmpDir); assertThat(e.getMessage(), startsWith("Temporary directory [")); assertThat(e.getMessage(), endsWith(".test] is not a directory")); } diff --git a/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java b/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java index 309d3fc88470..276c01d041bf 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java @@ -132,7 +132,7 @@ public class NodeRepurposeCommandTests extends ESTestCase { boolean hasClusterState = randomBoolean(); createIndexDataFiles(dataMasterSettings, shardCount, hasClusterState); - String messageText = NodeRepurposeCommand.noMasterMessage(1, environment.dataFiles().length * shardCount, 0); + String messageText = NodeRepurposeCommand.noMasterMessage(1, environment.dataDirs().length * shardCount, 0); Matcher outputMatcher = allOf( containsString(messageText), @@ -158,7 +158,7 @@ public class NodeRepurposeCommandTests extends ESTestCase { createIndexDataFiles(dataMasterSettings, shardCount, hasClusterState); Matcher matcher = allOf( - containsString(NodeRepurposeCommand.shardMessage(environment.dataFiles().length * shardCount, 1)), + containsString(NodeRepurposeCommand.shardMessage(environment.dataDirs().length * shardCount, 1)), conditionalNot(containsString("testUUID"), verbose == false), conditionalNot(containsString("testIndex"), verbose == false || hasClusterState == false), conditionalNot(containsString("no name for uuid: testUUID"), verbose == false || hasClusterState) @@ -272,7 +272,7 @@ public class NodeRepurposeCommandTests extends ESTestCase { private long digestPaths() { // use a commutative digest to avoid dependency on file system order. 
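// Every rename in the EnvironmentTests hunks above is the same mechanical
// change: accessors returning directories drop the misleading *File suffix. A
// short usage sketch (method names taken from the hunks; the home path is
// illustrative):
Settings settings = Settings.builder().put("path.home", "/tmp/es-home").build();
Environment env = new Environment(settings, null);
Path configDir = env.configDir(); // formerly env.configFile()
Path[] dataDirs = env.dataDirs(); // formerly env.dataFiles()
Path logsDir = env.logsDir();     // formerly env.logsFile()
// validateTmpDir() (formerly validateTmpFile()) throws if the temp dir is missing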
- return Arrays.stream(environment.dataFiles()).mapToLong(this::digestPath).sum(); + return Arrays.stream(environment.dataDirs()).mapToLong(this::digestPath).sum(); } private long digestPath(Path path) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java index d12bf5dc2e34..6bcc94924d55 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java @@ -2420,6 +2420,34 @@ public class IgnoredSourceFieldMapperTests extends MapperServiceTestCase { {"outer":{"inner":[{"a.b":"a.b","a.c":"a.c"}]}}""", syntheticSource); } + public void testSingleDeepIgnoredField() throws IOException { + DocumentMapper documentMapper = createSytheticSourceMapperService(mapping(b -> { + b.startObject("top"); + b.startObject("properties"); + { + b.startObject("level1").startObject("properties"); + { + b.startObject("level2").startObject("properties"); + { + b.startObject("n") + .field("type", "integer") + .field("doc_values", "false") + .field("synthetic_source_keep", "all") + .endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + })).documentMapper(); + + var syntheticSource = syntheticSource(documentMapper, b -> { + b.startObject("top").startObject("level1").startObject("level2").field("n", 25).endObject().endObject().endObject(); + }); + assertEquals("{\"top\":{\"level1\":{\"level2\":{\"n\":25}}}}", syntheticSource); + } + protected void validateRoundTripReader(String syntheticSource, DirectoryReader reader, DirectoryReader roundTripReader) throws IOException { // We exclude ignored source field since in some cases it contains an exact copy of a part of document source. @@ -2427,8 +2455,14 @@ public class IgnoredSourceFieldMapperTests extends MapperServiceTestCase { // and since the copy is exact, contents of ignored source are different. 
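// For reference, the XContent builder in testSingleDeepIgnoredField above
// declares, under the root mapping's properties, a field tree equivalent to
// this JSON: an integer three objects deep with doc values off and
// synthetic_source_keep=all, so the value travels through ignored source and
// must still round-trip into synthetic source:
// {
//   "top": { "properties": {
//     "level1": { "properties": {
//       "level2": { "properties": {
//         "n": { "type": "integer", "doc_values": "false", "synthetic_source_keep": "all" }
//       } }
//     } }
//   } }
// }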
assertReaderEquals( "round trip " + syntheticSource, - new FieldMaskingReader(Set.of(SourceFieldMapper.RECOVERY_SOURCE_NAME, IgnoredSourceFieldMapper.NAME), reader), - new FieldMaskingReader(Set.of(SourceFieldMapper.RECOVERY_SOURCE_NAME, IgnoredSourceFieldMapper.NAME), roundTripReader) + new FieldMaskingReader( + Set.of(SourceFieldMapper.RECOVERY_SOURCE_NAME, IgnoredSourceFieldMapper.NAME, SourceFieldMapper.RECOVERY_SOURCE_SIZE_NAME), + reader + ), + new FieldMaskingReader( + Set.of(SourceFieldMapper.RECOVERY_SOURCE_NAME, IgnoredSourceFieldMapper.NAME, SourceFieldMapper.RECOVERY_SOURCE_SIZE_NAME), + roundTripReader + ) ); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index 8ad37908b2e9..70010084cdb9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -489,8 +489,13 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { MapperService mapperService = createMapperService(settings, topMapping(b -> {})); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source(b -> b.field("field1", "value1"))); - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"field1\":\"value1\"}"))); + if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { + // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag + assertNotNull(doc.rootDoc().getField("_recovery_source")); + assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"field1\":\"value1\"}"))); + } else { + assertNull(doc.rootDoc().getField("_recovery_source")); + } } { Settings settings = Settings.builder() @@ -521,8 +526,16 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { MapperService mapperService = createMapperService(settings, mapping(b -> {})); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source(b -> { b.field("@timestamp", "2012-02-13"); })); - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\"}"))); + if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { + // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag + assertNotNull(doc.rootDoc().getField("_recovery_source")); + assertThat( + doc.rootDoc().getField("_recovery_source").binaryValue(), + equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\"}")) + ); + } else { + assertNull(doc.rootDoc().getField("_recovery_source")); + } } { Settings settings = Settings.builder() @@ -715,8 +728,16 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { MapperService mapperService = createMapperService(settings, mappings); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source(b -> { b.field("@timestamp", "2012-02-13"); })); - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\"}"))); + if 
(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { + // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag + assertNotNull(doc.rootDoc().getField("_recovery_source")); + assertThat( + doc.rootDoc().getField("_recovery_source").binaryValue(), + equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\"}")) + ); + } else { + assertNull(doc.rootDoc().getField("_recovery_source")); + } } { Settings settings = Settings.builder() @@ -742,11 +763,16 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { })); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source("123", b -> b.field("@timestamp", "2012-02-13").field("field", "value1"), null)); - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat( - doc.rootDoc().getField("_recovery_source").binaryValue(), - equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\",\"field\":\"value1\"}")) - ); + if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { + // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag + assertNotNull(doc.rootDoc().getField("_recovery_source")); + assertThat( + doc.rootDoc().getField("_recovery_source").binaryValue(), + equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\",\"field\":\"value1\"}")) + ); + } else { + assertNull(doc.rootDoc().getField("_recovery_source")); + } } { Settings settings = Settings.builder() @@ -790,11 +816,16 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { MapperService mapperService = createMapperService(settings, mappings); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source("123", b -> b.field("@timestamp", "2012-02-13").field("field", "value1"), null)); - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat( - doc.rootDoc().getField("_recovery_source").binaryValue(), - equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\",\"field\":\"value1\"}")) - ); + if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { + // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag + assertNotNull(doc.rootDoc().getField("_recovery_source")); + assertThat( + doc.rootDoc().getField("_recovery_source").binaryValue(), + equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\",\"field\":\"value1\"}")) + ); + } else { + assertNull(doc.rootDoc().getField("_recovery_source")); + } } { Settings settings = Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/KeywordFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/KeywordFieldBlockLoaderTests.java index 40e5829b5b12..4d5eb2ea641a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/KeywordFieldBlockLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/KeywordFieldBlockLoaderTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.mapper.BlockLoaderTestCase; import org.elasticsearch.logsdb.datageneration.FieldType; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -28,27 +27,30 @@ public class KeywordFieldBlockLoaderTests extends BlockLoaderTestCase { @SuppressWarnings("unchecked") @Override protected Object expected(Map fieldMapping, Object value, boolean syntheticSource) { - if (value == null) { - 
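// The if/else blocks repeated through SourceFieldMapperTests above all encode
// one rule: once the 'index_recovery_use_synthetic_source' feature flag is on,
// no stored _recovery_source field may be written. A hypothetical helper (not
// part of the change) capturing the shared assertion:
private static void assertRecoverySource(ParsedDocument doc, BytesRef expectedSource) {
    IndexableField field = doc.rootDoc().getField("_recovery_source");
    if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) {
        assertNotNull(field);
        assertThat(field.binaryValue(), equalTo(expectedSource));
    } else {
        assertNull(field);
    }
}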
return null; - } + var nullValue = (String) fieldMapping.get("null_value"); var ignoreAbove = fieldMapping.get("ignore_above") == null ? Integer.MAX_VALUE : ((Number) fieldMapping.get("ignore_above")).intValue(); - if (value instanceof String s) { - return convert(s, ignoreAbove); + if (value == null) { + return convert(null, nullValue, ignoreAbove); } - Function<Stream<String>, Stream<BytesRef>> convertValues = s -> s.map(v -> convert(v, ignoreAbove)).filter(Objects::nonNull); + if (value instanceof String s) { + return convert(s, nullValue, ignoreAbove); + } + + Function<Stream<String>, Stream<BytesRef>> convertValues = s -> s.map(v -> convert(v, nullValue, ignoreAbove)) + .filter(Objects::nonNull); if ((boolean) fieldMapping.getOrDefault("doc_values", false)) { // Sorted and no duplicates - var values = new HashSet<>((List<String>) value); - var resultList = convertValues.compose(s -> values.stream().filter(Objects::nonNull).sorted()) + var resultList = convertValues.andThen(Stream::distinct) + .andThen(Stream::sorted) .andThen(Stream::toList) - .apply(values.stream()); + .apply(((List<String>) value).stream()); return maybeFoldList(resultList); } @@ -69,9 +71,13 @@ public class KeywordFieldBlockLoaderTests extends BlockLoaderTestCase { return list; } - private BytesRef convert(String value, int ignoreAbove) { + private BytesRef convert(String value, String nullValue, int ignoreAbove) { if (value == null) { - return null; + if (nullValue != null) { + value = nullValue; + } else { + return null; + } } return value.length() <= ignoreAbove ? new BytesRef(value) : null; diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index abaab1ac8983..dd6baee60114 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -453,7 +453,7 @@ public class AnalysisModuleTests extends ESTestCase { InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff"); InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic"); Dictionary dictionary; - try (Directory tmp = newFSDirectory(environment.tmpFile())) { + try (Directory tmp = newFSDirectory(environment.tmpDir())) { dictionary = new Dictionary(tmp, "hunspell", aff, dic); } AnalysisModule module = new AnalysisModule(environment, singletonList(new AnalysisPlugin() { diff --git a/server/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java b/server/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java index 32edcc0ad82a..c0e1c1143ef4 100644 --- a/server/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java +++ b/server/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java @@ -57,7 +57,7 @@ public class InternalSettingsPreparerTests extends ESTestCase { assertEquals(defaultNodeName, settings.get("node.name")); assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set String home = Environment.PATH_HOME_SETTING.get(baseEnvSettings); - String configDir = env.configFile().toString(); + String configDir = env.configDir().toString(); assertTrue(configDir, configDir.startsWith(home)); assertEquals("elasticsearch", settings.get("cluster.name")); } diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java 
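// Doc values come back sorted with duplicates removed, which is why the
// expected-value pipeline above chains distinct() and sorted() instead of
// detouring through a HashSet. A self-contained rendering of the same
// composition (sample inputs are illustrative):
Function<Stream<String>, Stream<BytesRef>> convert = s -> s.map(BytesRef::new);
List<BytesRef> expected = convert.andThen(Stream::distinct)
    .andThen(Stream::sorted) // BytesRef is Comparable, so natural order applies
    .andThen(Stream::toList)
    .apply(Stream.of("b", "a", "b"));
// expected now holds the BytesRef forms of "a" and "b", in that order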
b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java index 97158e27b852..8129f67947cf 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java @@ -52,7 +52,7 @@ public class PluginsLoaderTests extends ESTestCase { static PluginsLoader newPluginsLoader(Settings settings) { return PluginsLoader.createPluginsLoader( Set.of(), - PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()), Map.of(), false ); @@ -121,7 +121,7 @@ public class PluginsLoaderTests extends ESTestCase { var pluginsLoader = PluginsLoader.createPluginsLoader( Set.of(), - PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()), Map.of(STABLE_PLUGIN_NAME, Set.of(STABLE_PLUGIN_MODULE_NAME)), false ); @@ -182,7 +182,7 @@ public class PluginsLoaderTests extends ESTestCase { var pluginsLoader = PluginsLoader.createPluginsLoader( Set.of(), - PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()), Map.of(MODULAR_PLUGIN_NAME, Set.of(MODULAR_PLUGIN_MODULE_NAME)), false ); diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index e76994f69c01..57158df344a3 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -70,7 +70,7 @@ public class PluginsServiceTests extends ESTestCase { null, PluginsLoader.createPluginsLoader( Set.of(), - PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()), Map.of(), false ) diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 8372060ba5c3..9868e1a77c96 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -127,7 +127,7 @@ public class FileSettingsServiceTests extends ESTestCase { clusterService.getMasterService().setClusterStateSupplier(() -> clusterState); env = newEnvironment(Settings.EMPTY); - Files.createDirectories(env.configFile()); + Files.createDirectories(env.configDir()); ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); @@ -178,7 +178,7 @@ public class FileSettingsServiceTests extends ESTestCase { public void testOperatorDirName() { Path operatorPath = fileSettingsService.watchedFileDir(); - assertTrue(operatorPath.startsWith(env.configFile())); + assertTrue(operatorPath.startsWith(env.configDir())); assertTrue(operatorPath.endsWith("operator")); Path operatorSettingsFile = fileSettingsService.watchedFile(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java index 8a72f8af7035..06600441b0a4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java @@ -343,10 +343,7 @@ public class CompositeValuesCollectorQueueTests extends AggregatorTestCase { final SortedDocsProducer docsProducer = sources[0].createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery()); for (LeafReaderContext leafReaderContext : reader.leaves()) { if (docsProducer != null && withProducer) { - assertEquals( - DocIdSet.EMPTY, - docsProducer.processLeaf(new MatchAllDocsQuery(), queue, leafReaderContext, false) - ); + assertEquals(DocIdSet.EMPTY, docsProducer.processLeaf(queue, leafReaderContext, false)); } else { final LeafBucketCollector leafCollector = new LeafBucketCollector() { @Override diff --git a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java index 9b56cd3bde53..cfb3cc68e035 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.io.stream.RecyclerBytesStreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.transport.InboundDecoder.ChannelType; @@ -126,105 +125,6 @@ public class InboundDecoderTests extends ESTestCase { } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // can delete test in v9 - public void testDecodePreHeaderSizeVariableInt() throws IOException { - Compression.Scheme compressionScheme = randomFrom(Compression.Scheme.DEFLATE, Compression.Scheme.DEFLATE, null); - String action = "test-request"; - long requestId = randomNonNegativeLong(); - final TransportVersion preHeaderVariableInt = TransportHandshaker.V7_HANDSHAKE_VERSION; - final String contentValue = randomAlphaOfLength(100); - // 8.0 is only compatible with handshakes on a pre-variable int version - final OutboundMessage message = new OutboundMessage.Request( - threadContext, - new TestRequest(contentValue), - preHeaderVariableInt, - action, - requestId, - true, - compressionScheme - ); - - try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { - final BytesReference totalBytes = message.serialize(os); - int partialHeaderSize = TcpHeader.headerSize(preHeaderVariableInt); - - InboundDecoder decoder = new InboundDecoder(recycler); - final ArrayList fragments = new ArrayList<>(); - final ReleasableBytesReference releasable1 = wrapAsReleasable(totalBytes); - int bytesConsumed = decoder.decode(releasable1, fragments::add); - assertEquals(partialHeaderSize, bytesConsumed); - assertTrue(releasable1.hasReferences()); - - final Header header = (Header) fragments.get(0); - assertEquals(requestId, header.getRequestId()); - assertEquals(preHeaderVariableInt, header.getVersion()); - if (compressionScheme == null) { - assertFalse(header.isCompressed()); - } else { - assertTrue(header.isCompressed()); - } - 
assertTrue(header.isHandshake()); - assertTrue(header.isRequest()); - assertTrue(header.needsToReadVariableHeader()); - fragments.clear(); - - final BytesReference bytes2 = totalBytes.slice(bytesConsumed, totalBytes.length() - bytesConsumed); - final ReleasableBytesReference releasable2 = wrapAsReleasable(bytes2); - int bytesConsumed2 = decoder.decode(releasable2, fragments::add); - if (compressionScheme == null) { - assertEquals(2, fragments.size()); - } else { - assertEquals(3, fragments.size()); - final Object body = fragments.get(1); - assertThat(body, instanceOf(ReleasableBytesReference.class)); - ((ReleasableBytesReference) body).close(); - } - assertEquals(InboundDecoder.END_CONTENT, fragments.get(fragments.size() - 1)); - assertEquals(totalBytes.length() - bytesConsumed, bytesConsumed2); - } - } - - public void testDecodeHandshakeV7Compatibility() throws IOException { - String action = "test-request"; - long requestId = randomNonNegativeLong(); - final String headerKey = randomAlphaOfLength(10); - final String headerValue = randomAlphaOfLength(20); - threadContext.putHeader(headerKey, headerValue); - TransportVersion handshakeCompat = TransportHandshaker.V7_HANDSHAKE_VERSION; - OutboundMessage message = new OutboundMessage.Request( - threadContext, - new TestRequest(randomAlphaOfLength(100)), - handshakeCompat, - action, - requestId, - true, - null - ); - - try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { - final BytesReference bytes = message.serialize(os); - int totalHeaderSize = TcpHeader.headerSize(handshakeCompat); - - InboundDecoder decoder = new InboundDecoder(recycler); - final ArrayList fragments = new ArrayList<>(); - final ReleasableBytesReference releasable1 = wrapAsReleasable(bytes); - int bytesConsumed = decoder.decode(releasable1, fragments::add); - assertEquals(totalHeaderSize, bytesConsumed); - assertTrue(releasable1.hasReferences()); - - final Header header = (Header) fragments.get(0); - assertEquals(requestId, header.getRequestId()); - assertEquals(handshakeCompat, header.getVersion()); - assertFalse(header.isCompressed()); - assertTrue(header.isHandshake()); - assertTrue(header.isRequest()); - // TODO: On 9.0 this will be true because all compatible versions with contain the variable header int - assertTrue(header.needsToReadVariableHeader()); - fragments.clear(); - } - - } - public void testDecodeHandshakeV8Compatibility() throws IOException { doHandshakeCompatibilityTest(TransportHandshaker.V8_HANDSHAKE_VERSION, null); doHandshakeCompatibilityTest(TransportHandshaker.V8_HANDSHAKE_VERSION, Compression.Scheme.DEFLATE); @@ -453,46 +353,6 @@ public class InboundDecoderTests extends ESTestCase { } - public void testCompressedDecodeHandshakeCompatibility() throws IOException { - String action = "test-request"; - long requestId = randomNonNegativeLong(); - final String headerKey = randomAlphaOfLength(10); - final String headerValue = randomAlphaOfLength(20); - threadContext.putHeader(headerKey, headerValue); - TransportVersion handshakeCompat = TransportHandshaker.V7_HANDSHAKE_VERSION; - OutboundMessage message = new OutboundMessage.Request( - threadContext, - new TestRequest(randomAlphaOfLength(100)), - handshakeCompat, - action, - requestId, - true, - Compression.Scheme.DEFLATE - ); - - try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { - final BytesReference bytes = message.serialize(os); - int totalHeaderSize = TcpHeader.headerSize(handshakeCompat); - - InboundDecoder decoder = new 
InboundDecoder(recycler); - final ArrayList fragments = new ArrayList<>(); - final ReleasableBytesReference releasable1 = wrapAsReleasable(bytes); - int bytesConsumed = decoder.decode(releasable1, fragments::add); - assertEquals(totalHeaderSize, bytesConsumed); - assertTrue(releasable1.hasReferences()); - - final Header header = (Header) fragments.get(0); - assertEquals(requestId, header.getRequestId()); - assertEquals(handshakeCompat, header.getVersion()); - assertTrue(header.isCompressed()); - assertTrue(header.isHandshake()); - assertTrue(header.isRequest()); - // TODO: On 9.0 this will be true because all compatible versions with contain the variable header int - assertTrue(header.needsToReadVariableHeader()); - fragments.clear(); - } - } - public void testVersionIncompatibilityDecodeException() throws IOException { String action = "test-request"; long requestId = randomNonNegativeLong(); diff --git a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java index cb266c58d70d..7f5c4fbb1c4d 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java @@ -290,7 +290,9 @@ public class InboundHandlerTests extends ESTestCase { ); BytesStreamOutput byteData = new BytesStreamOutput(); TaskId.EMPTY_TASK_ID.writeTo(byteData); + // simulate bytes of a transport handshake: vInt transport version then release version string TransportVersion.writeVersion(remoteVersion, byteData); + byteData.writeString(randomIdentifier()); final InboundMessage requestMessage = new InboundMessage( requestHeader, ReleasableBytesReference.wrap(byteData.bytes()), diff --git a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java index de44ca70f200..2bac41199ab8 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.core.UpdateForV10; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -38,56 +37,6 @@ import static org.hamcrest.Matchers.lessThan; public class TransportHandshakerRawMessageTests extends ESSingleNodeTestCase { - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // remove support for v7 handshakes in v9 - public void testV7Handshake() throws Exception { - final BytesRef handshakeRequestBytes; - final var requestId = randomNonNegativeLong(); - try (var outputStream = new BytesStreamOutput()) { - outputStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - outputStream.writeLong(requestId); - outputStream.writeByte(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0))); - outputStream.writeInt(TransportHandshaker.V7_HANDSHAKE_VERSION.id()); - outputStream.writeByte((byte) 0); // no request headers; - outputStream.writeByte((byte) 0); // no response headers; - outputStream.writeStringArray(new String[] { "x-pack" }); // one feature - outputStream.writeString("internal:tcp/handshake"); - 
outputStream.writeByte((byte) 0); // no parent task ID; - - final var requestNodeTransportVersionId = TransportVersionUtils.randomCompatibleVersion(random()).id(); - assertThat(requestNodeTransportVersionId, allOf(greaterThanOrEqualTo(1 << 22), lessThan(1 << 28))); // 4-byte vInt - outputStream.writeByte((byte) 4); // payload length - outputStream.writeVInt(requestNodeTransportVersionId); - - handshakeRequestBytes = outputStream.bytes().toBytesRef(); - } - - final BytesRef handshakeResponseBytes; - try (var socket = openTransportConnection()) { - var streamOutput = new OutputStreamStreamOutput(socket.getOutputStream()); - streamOutput.write("ES".getBytes(StandardCharsets.US_ASCII)); - streamOutput.writeInt(handshakeRequestBytes.length); - streamOutput.writeBytes(handshakeRequestBytes.bytes, handshakeRequestBytes.offset, handshakeRequestBytes.length); - streamOutput.flush(); - - var streamInput = new InputStreamStreamInput(socket.getInputStream()); - assertEquals((byte) 'E', streamInput.readByte()); - assertEquals((byte) 'S', streamInput.readByte()); - var responseLength = streamInput.readInt(); - handshakeResponseBytes = streamInput.readBytesRef(responseLength); - } - - try (var inputStream = new BytesArray(handshakeResponseBytes).streamInput()) { - assertEquals(requestId, inputStream.readLong()); - assertEquals(TransportStatus.setResponse(TransportStatus.setHandshake((byte) 0)), inputStream.readByte()); - assertEquals(TransportHandshaker.V7_HANDSHAKE_VERSION.id(), inputStream.readInt()); - assertEquals((byte) 0, inputStream.readByte()); // no request headers - assertEquals((byte) 0, inputStream.readByte()); // no response headers - inputStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - assertEquals(TransportVersion.current().id(), inputStream.readVInt()); - assertEquals(-1, inputStream.read()); - } - } - @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // remove support for v8 handshakes in v10 public void testV8Handshake() throws Exception { final BytesRef handshakeRequestBytes; @@ -223,11 +172,10 @@ public class TransportHandshakerRawMessageTests extends ESSingleNodeTestCase { try (var inputStream = new BytesArray(handshakeRequestBytes).streamInput()) { assertThat(inputStream.readLong(), greaterThan(0L)); assertEquals(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0)), inputStream.readByte()); - assertEquals(TransportHandshaker.V8_HANDSHAKE_VERSION.id(), inputStream.readInt()); - assertEquals(0x1a, inputStream.readInt()); // length of variable-length header, always 0x1a + assertEquals(TransportHandshaker.V9_HANDSHAKE_VERSION.id(), inputStream.readInt()); + assertEquals(0x19, inputStream.readInt()); // length of variable-length header, always 0x19 assertEquals((byte) 0, inputStream.readByte()); // no request headers assertEquals((byte) 0, inputStream.readByte()); // no response headers - assertEquals((byte) 0, inputStream.readByte()); // no features assertEquals("internal:tcp/handshake", inputStream.readString()); assertEquals((byte) 0, inputStream.readByte()); // no parent task inputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); @@ -236,8 +184,9 @@ public class TransportHandshakerRawMessageTests extends ESSingleNodeTestCase { } try (var inputStream = new BytesArray(payloadBytes).streamInput()) { - inputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + inputStream.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); assertEquals(TransportVersion.current().id(), inputStream.readVInt()); + 
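// Wire format exercised above: from the v9 handshake onwards the payload is the
// transport version id as a vInt followed by the node's release version as a
// length-prefixed string, hence readVInt() then readString() when decoding.
// Sketch of producing such a payload with the same stream types:
try (BytesStreamOutput payload = new BytesStreamOutput()) {
    payload.writeVInt(TransportVersion.current().id());
    payload.writeString(Build.current().version());
    // payload.bytes() is the internal message the assertions above read back
}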
assertEquals(Build.current().version(), inputStream.readString()); assertEquals(-1, inputStream.read()); } } diff --git a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java index d260d6615765..f36a56d9237c 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java @@ -8,24 +8,32 @@ */ package org.elasticsearch.transport; +import org.apache.logging.log4j.Level; +import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLog; import org.elasticsearch.test.TransportVersionUtils; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.TestThreadPool; import java.io.IOException; import java.util.Collections; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; @@ -39,8 +47,8 @@ public class TransportHandshakerTests extends ESTestCase { private TestThreadPool threadPool; private TransportHandshaker.HandshakeRequestSender requestSender; - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - private static final TransportVersion HANDSHAKE_REQUEST_VERSION = TransportHandshaker.V8_HANDSHAKE_VERSION; + @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // new handshake version required in v10 + private static final TransportVersion HANDSHAKE_REQUEST_VERSION = TransportHandshaker.V9_HANDSHAKE_VERSION; @Override public void setUp() throws Exception { @@ -93,6 +101,40 @@ public class TransportHandshakerTests extends ESTestCase { assertEquals(TransportVersion.current(), versionFuture.actionGet()); } + @TestLogging(reason = "testing WARN logging", value = "org.elasticsearch.transport.TransportHandshaker:WARN") + public void testIncompatibleHandshakeRequest() throws IOException { + TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest( + getRandomIncompatibleTransportVersion(), + randomIdentifier() + ); + BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); + bytesStreamOutput.setTransportVersion(HANDSHAKE_REQUEST_VERSION); + handshakeRequest.writeTo(bytesStreamOutput); + StreamInput input = bytesStreamOutput.bytes().streamInput(); + input.setTransportVersion(HANDSHAKE_REQUEST_VERSION); + final TestTransportChannel channel = new TestTransportChannel(ActionListener.running(() -> fail("should not complete"))); + + MockLog.assertThatLogger( + () -> assertThat( + expectThrows(IllegalStateException.class, () -> handshaker.handleHandshake(channel, randomNonNegativeLong(), input)) + .getMessage(), + allOf( + 
containsString("Rejecting unreadable transport handshake"), + containsString("[" + handshakeRequest.releaseVersion + "/" + handshakeRequest.transportVersion + "]"), + containsString("[" + Build.current().version() + "/" + TransportVersion.current() + "]"), + containsString("which has an incompatible wire format") + ) + ), + TransportHandshaker.class, + new MockLog.SeenEventExpectation( + "warning", + TransportHandshaker.class.getCanonicalName(), + Level.WARN, + "Rejecting unreadable transport handshake * incompatible wire format." + ) + ); + } + public void testHandshakeResponseFromOlderNode() throws Exception { final PlainActionFuture versionFuture = new PlainActionFuture<>(); final long reqId = randomNonNegativeLong(); @@ -108,6 +150,54 @@ public class TransportHandshakerTests extends ESTestCase { assertEquals(remoteVersion, versionFuture.result()); } + @TestLogging(reason = "testing WARN logging", value = "org.elasticsearch.transport.TransportHandshaker:WARN") + public void testHandshakeResponseFromOlderNodeWithPatchedProtocol() { + final PlainActionFuture versionFuture = new PlainActionFuture<>(); + final long reqId = randomNonNegativeLong(); + handshaker.sendHandshake(reqId, node, channel, SAFE_AWAIT_TIMEOUT, versionFuture); + TransportResponseHandler handler = handshaker.removeHandlerForHandshake(reqId); + + assertFalse(versionFuture.isDone()); + + final var handshakeResponse = new TransportHandshaker.HandshakeResponse( + getRandomIncompatibleTransportVersion(), + randomIdentifier() + ); + + MockLog.assertThatLogger( + () -> handler.handleResponse(handshakeResponse), + TransportHandshaker.class, + new MockLog.SeenEventExpectation( + "warning", + TransportHandshaker.class.getCanonicalName(), + Level.WARN, + "Rejecting unreadable transport handshake * incompatible wire format." + ) + ); + + assertTrue(versionFuture.isDone()); + assertThat( + expectThrows(ExecutionException.class, IllegalStateException.class, versionFuture::result).getMessage(), + allOf( + containsString("Rejecting unreadable transport handshake"), + containsString("[" + handshakeResponse.getReleaseVersion() + "/" + handshakeResponse.getTransportVersion() + "]"), + containsString("[" + Build.current().version() + "/" + TransportVersion.current() + "]"), + containsString("which has an incompatible wire format") + ) + ); + } + + private static TransportVersion getRandomIncompatibleTransportVersion() { + return randomBoolean() + // either older than MINIMUM_COMPATIBLE + ? 
new TransportVersion(between(1, TransportVersions.MINIMUM_COMPATIBLE.id() - 1)) + // or between MINIMUM_COMPATIBLE and current but not known + : randomValueOtherThanMany( + TransportVersion::isKnown, + () -> new TransportVersion(between(TransportVersions.MINIMUM_COMPATIBLE.id(), TransportVersion.current().id())) + ); + } + public void testHandshakeResponseFromNewerNode() throws Exception { final PlainActionFuture versionFuture = new PlainActionFuture<>(); final long reqId = randomNonNegativeLong(); @@ -133,10 +223,8 @@ public class TransportHandshakerTests extends ESTestCase { verify(requestSender).sendRequest(node, channel, reqId, HANDSHAKE_REQUEST_VERSION); - TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest( - TransportVersion.current(), - randomIdentifier() - ); + final var buildVersion = randomIdentifier(); + final var handshakeRequest = new TransportHandshaker.HandshakeRequest(TransportVersion.current(), buildVersion); BytesStreamOutput currentHandshakeBytes = new BytesStreamOutput(); currentHandshakeBytes.setTransportVersion(HANDSHAKE_REQUEST_VERSION); handshakeRequest.writeTo(currentHandshakeBytes); @@ -145,17 +233,27 @@ public class TransportHandshakerTests extends ESTestCase { BytesStreamOutput futureHandshake = new BytesStreamOutput(); TaskId.EMPTY_TASK_ID.writeTo(lengthCheckingHandshake); TaskId.EMPTY_TASK_ID.writeTo(futureHandshake); + final var extraDataSize = between(0, 1024); try (BytesStreamOutput internalMessage = new BytesStreamOutput()) { Version.writeVersion(Version.CURRENT, internalMessage); + internalMessage.writeString(buildVersion); lengthCheckingHandshake.writeBytesReference(internalMessage.bytes()); - internalMessage.write(new byte[1024]); + internalMessage.write(new byte[extraDataSize]); futureHandshake.writeBytesReference(internalMessage.bytes()); } StreamInput futureHandshakeStream = futureHandshake.bytes().streamInput(); // We check that the handshake we serialize for this test equals the actual request. // Otherwise, we need to update the test. assertEquals(currentHandshakeBytes.bytes().length(), lengthCheckingHandshake.bytes().length()); - assertEquals(1031, futureHandshakeStream.available()); + final var expectedInternalMessageSize = 4 /* transport version id */ + + (1 + buildVersion.length()) /* length prefixed release version string */ + + extraDataSize; + assertEquals( + 1 /* EMPTY_TASK_ID */ + + (expectedInternalMessageSize < 0x80 ? 
1 : 2) /* internalMessage size vInt */
+                + expectedInternalMessageSize /* internalMessage */,
+            futureHandshakeStream.available()
+        );

        final PlainActionFuture responseFuture = new PlainActionFuture<>();
        final TestTransportChannel channel = new TestTransportChannel(responseFuture);
        handshaker.handleHandshake(channel, reqId, futureHandshakeStream);
@@ -166,43 +264,6 @@ public class TransportHandshakerTests extends ESTestCase {
        assertEquals(TransportVersion.current(), response.getTransportVersion());
    }

-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // v7 handshakes are not supported in v9
-    public void testReadV7HandshakeRequest() throws IOException {
-        final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random());
-
-        final var requestPayloadStreamOutput = new BytesStreamOutput();
-        requestPayloadStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION);
-        requestPayloadStreamOutput.writeVInt(transportVersion.id());
-
-        final var requestBytesStreamOutput = new BytesStreamOutput();
-        requestBytesStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION);
-        TaskId.EMPTY_TASK_ID.writeTo(requestBytesStreamOutput);
-        requestBytesStreamOutput.writeBytesReference(requestPayloadStreamOutput.bytes());
-
-        final var requestBytesStream = requestBytesStreamOutput.bytes().streamInput();
-        requestBytesStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION);
-        final var handshakeRequest = new TransportHandshaker.HandshakeRequest(requestBytesStream);
-
-        assertEquals(transportVersion, handshakeRequest.transportVersion);
-        assertEquals(transportVersion.toReleaseVersion(), handshakeRequest.releaseVersion);
-    }
-
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // v7 handshakes are not supported in v9
-    public void testReadV7HandshakeResponse() throws IOException {
-        final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random());
-
-        final var responseBytesStreamOutput = new BytesStreamOutput();
-        responseBytesStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION);
-        responseBytesStreamOutput.writeVInt(transportVersion.id());
-
-        final var responseBytesStream = responseBytesStreamOutput.bytes().streamInput();
-        responseBytesStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION);
-        final var handshakeResponse = new TransportHandshaker.HandshakeResponse(responseBytesStream);
-
-        assertEquals(transportVersion, handshakeResponse.getTransportVersion());
-        assertEquals(transportVersion.toReleaseVersion(), handshakeResponse.getReleaseVersion());
-    }
-
    public void testReadV8HandshakeRequest() throws IOException {
        final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random());
diff --git a/server/src/test/java/org/elasticsearch/transport/TransportStatsTests.java b/server/src/test/java/org/elasticsearch/transport/TransportStatsTests.java
index c3965547abb5..1c9cb4c9afc0 100644
--- a/server/src/test/java/org/elasticsearch/transport/TransportStatsTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/TransportStatsTests.java
@@ -20,50 +20,8 @@ import java.util.Map;

 public class TransportStatsTests extends ESTestCase {
    public void testToXContent() {
-        assertEquals(
-            Strings.toString(
-                new TransportStats(1, 2, 3, ByteSizeUnit.MB.toBytes(4), 5, ByteSizeUnit.MB.toBytes(6), new long[0], new long[0], Map.of()),
-                false,
-                true
-            ),
-            """
-                {"transport":{"server_open":1,"total_outbound_connections":2,\
-                "rx_count":3,"rx_size":"4mb","rx_size_in_bytes":4194304,\
-                "tx_count":5,"tx_size":"6mb","tx_size_in_bytes":6291456\
-                }}"""
-        );
        final var histogram = new long[HandlingTimeTracker.BUCKET_COUNT];
-        assertEquals(
-            Strings.toString(
-                new TransportStats(1, 2, 3, ByteSizeUnit.MB.toBytes(4), 5, ByteSizeUnit.MB.toBytes(6), histogram, histogram, Map.of()),
-                false,
-                true
-            ),
-            """
-                {"transport":{"server_open":1,"total_outbound_connections":2,\
-                "rx_count":3,"rx_size":"4mb","rx_size_in_bytes":4194304,\
-                "tx_count":5,"tx_size":"6mb","tx_size_in_bytes":6291456,\
-                "inbound_handling_time_histogram":[],\
-                "outbound_handling_time_histogram":[]\
-                }}"""
-        );
        histogram[4] = 10;
-        assertEquals(
-            Strings.toString(
-                new TransportStats(1, 2, 3, ByteSizeUnit.MB.toBytes(4), 5, ByteSizeUnit.MB.toBytes(6), histogram, histogram, Map.of()),
-                false,
-                true
-            ),
-            """
-                {"transport":{"server_open":1,"total_outbound_connections":2,\
-                "rx_count":3,"rx_size":"4mb","rx_size_in_bytes":4194304,\
-                "tx_count":5,"tx_size":"6mb","tx_size_in_bytes":6291456,\
-                "inbound_handling_time_histogram":[{"ge":"8ms","ge_millis":8,"lt":"16ms","lt_millis":16,"count":10}],\
-                "outbound_handling_time_histogram":[{"ge":"8ms","ge_millis":8,"lt":"16ms","lt_millis":16,"count":10}]\
-                }}"""
-        );

        final var requestSizeHistogram = new long[29];
        requestSizeHistogram[2] = 9;
@@ -84,8 +42,8 @@ public class TransportStatsTests extends ESTestCase {
            ByteSizeUnit.MB.toBytes(4),
            5,
            ByteSizeUnit.MB.toBytes(6),
-            new long[0],
-            new long[0],
+            histogram,
+            histogram,
            Map.of("internal:test/action", exampleActionStats)
        ),
        false,
@@ -95,6 +53,8 @@
            {"transport":{"server_open":1,"total_outbound_connections":2,\
            "rx_count":3,"rx_size":"4mb","rx_size_in_bytes":4194304,\
            "tx_count":5,"tx_size":"6mb","tx_size_in_bytes":6291456,\
+            "inbound_handling_time_histogram":[{"ge":"8ms","ge_millis":8,"lt":"16ms","lt_millis":16,"count":10}],\
+            "outbound_handling_time_histogram":[{"ge":"8ms","ge_millis":8,"lt":"16ms","lt_millis":16,"count":10}],\
            "actions":{"internal:test/action":%s}}}""", Strings.toString(exampleActionStats, false, true))
    );
 }
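For readers decoding the expected JSON above: setting `histogram[4] = 10` produces a bucket reported as `ge 8ms` / `lt 16ms`, which is consistent with power-of-two bucket boundaries. A minimal sketch of that bucketing, assuming `HandlingTimeTracker` uses ranges of the form [2^(i-1), 2^i) with bucket 0 covering [0, 1ms); the bucket count and exact boundaries here are assumptions, not taken from the real class:

```java
// Sketch only: power-of-two handling-time buckets as implied by the test's expected JSON.
public class BucketSketch {
    // Assumed 18 buckets: bucket 0 = [0,1ms), bucket i = [2^(i-1), 2^i) ms, last bucket open-ended.
    static int bucketFor(long millis) {
        int bucket = 0;
        while (bucket < 17 && millis >= (1L << bucket)) {
            bucket++;
        }
        return bucket;
    }

    public static void main(String[] args) {
        System.out.println(bucketFor(10)); // 4: 10ms falls in [8ms, 16ms), matching histogram[4] above
    }
}
```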
diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java
index d0b06942c134..79e28d955c6f 100644
--- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java
+++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java
@@ -89,8 +89,7 @@ public class HeapAttackIT extends ESRestTestCase {
     */
    public void testSortByManyLongsSuccess() throws IOException {
        initManyLongs();
-        Response response = sortByManyLongs(500);
-        Map map = responseAsMap(response);
+        Map response = sortByManyLongs(500);
        ListMatcher columns = matchesList().item(matchesMap().entry("name", "a").entry("type", "long"))
            .item(matchesMap().entry("name", "b").entry("type", "long"));
        ListMatcher values = matchesList();
@@ -99,7 +98,7 @@
                values = values.item(List.of(0, b));
            }
        }
-        assertResultMap(map, columns, values);
+        assertResultMap(response, columns, values);
    }

    /**
@@ -107,7 +106,8 @@
     */
    public void testSortByManyLongsTooMuchMemory() throws IOException {
        initManyLongs();
-        assertCircuitBreaks(() -> sortByManyLongs(5000));
+        // 5000 is plenty to break on most nodes
+        assertCircuitBreaks(attempt -> sortByManyLongs(attempt * 5000));
    }

    /**
     */
@@ -191,26 +191,42 @@
        );
    }

-    private void assertCircuitBreaks(ThrowingRunnable r) throws IOException {
-        ResponseException e = expectThrows(ResponseException.class, r);
-        Map map = responseAsMap(e.getResponse());
-        logger.info("expected circuit breaker {}", map);
-        assertMap(
-            map,
+    private static final int MAX_ATTEMPTS = 5;
+
+    interface TryCircuitBreaking {
+        Map attempt(int attempt) throws IOException;
+    }
+
+    private void assertCircuitBreaks(TryCircuitBreaking tryBreaking) throws IOException {
+        assertCircuitBreaks(
+            tryBreaking,
            matchesMap().entry("status", 429).entry("error", matchesMap().extraOk().entry("type", "circuit_breaking_exception"))
        );
    }

-    private void assertFoldCircuitBreaks(ThrowingRunnable r) throws IOException {
-        ResponseException e = expectThrows(ResponseException.class, r);
-        Map map = responseAsMap(e.getResponse());
-        logger.info("expected fold circuit breaking {}", map);
-        assertMap(
-            map,
+    private void assertFoldCircuitBreaks(TryCircuitBreaking tryBreaking) throws IOException {
+        assertCircuitBreaks(
+            tryBreaking,
            matchesMap().entry("status", 400).entry("error", matchesMap().extraOk().entry("type", "fold_too_much_memory_exception"))
        );
    }

+    private void assertCircuitBreaks(TryCircuitBreaking tryBreaking, MapMatcher responseMatcher) throws IOException {
+        int attempt = 1;
+        while (attempt <= MAX_ATTEMPTS) {
+            try {
+                Map response = tryBreaking.attempt(attempt);
+                logger.warn("{}: should have circuit broken but got {}", attempt, response);
+                attempt++;
+            } catch (ResponseException e) {
+                Map map = responseAsMap(e.getResponse());
+                assertMap(map, responseMatcher);
+                return;
+            }
+        }
+        fail("giving up circuit breaking after " + MAX_ATTEMPTS + " attempts");
+    }
+
    private void assertParseFailure(ThrowingRunnable r) throws IOException {
        ResponseException e = expectThrows(ResponseException.class, r);
        Map map = responseAsMap(e.getResponse());
@@ -218,9 +234,9 @@
        assertMap(map, matchesMap().entry("status", 400).entry("error", matchesMap().extraOk().entry("type", "parsing_exception")));
    }

-    private Response sortByManyLongs(int count) throws IOException {
+    private Map sortByManyLongs(int count) throws IOException {
        logger.info("sorting by {} longs", count);
-        return query(makeSortByManyLongs(count).toString(), null);
+        return responseAsMap(query(makeSortByManyLongs(count).toString(), null));
    }

    private StringBuilder makeSortByManyLongs(int count) {
@@ -318,8 +334,7 @@
    public void testManyConcat() throws IOException {
        int strings = 300;
        initManyLongs();
-        Response resp = manyConcat("FROM manylongs", strings);
-        assertManyStrings(resp, strings);
+        assertManyStrings(manyConcat("FROM manylongs", strings), strings);
    }

    /**
@@ -327,7 +342,8 @@
     */
    public void testHugeManyConcat() throws IOException {
        initManyLongs();
-        assertCircuitBreaks(() -> manyConcat("FROM manylongs", 2000));
+        // 2000 is plenty to break on most nodes
+        assertCircuitBreaks(attempt -> manyConcat("FROM manylongs", attempt * 2000));
    }

    /**
@@ -335,18 +351,18 @@
     */
    public void testManyConcatFromRow() throws IOException {
        int strings = 2000;
-        Response resp = manyConcat("ROW a=9999, b=9999, c=9999, d=9999, e=9999", strings);
-        assertManyStrings(resp, strings);
+        assertManyStrings(manyConcat("ROW a=9999, b=9999, c=9999, d=9999, e=9999", strings), strings);
    }

    /**
     * Hits a circuit breaker by building many moderately long strings.
     */
    public void testHugeManyConcatFromRow() throws IOException {
+        // 5000 is plenty to break on most nodes
        assertFoldCircuitBreaks(
-            () -> manyConcat(
+            attempt -> manyConcat(
                "ROW a=9999999999999, b=99999999999999999, c=99999999999999999, d=99999999999999999, e=99999999999999999",
-                5000
+                attempt * 5000
            )
        );
    }
@@ -361,7 +377,7 @@
    /**
     * Tests that generate many moderately long strings.
     */
-    private Response manyConcat(String init, int strings) throws IOException {
+    private Map manyConcat(String init, int strings) throws IOException {
        StringBuilder query = startQuery();
        query.append(init).append(" | EVAL str = CONCAT(");
        query.append(
@@ -388,7 +404,7 @@
            query.append("str").append(s);
        }
        query.append("\"}");
-        return query(query.toString(), "columns");
+        return responseAsMap(query(query.toString(), "columns"));
    }

    /**
@@ -397,8 +413,7 @@
     */
    public void testManyRepeat() throws IOException {
        int strings = 30;
        initManyLongs();
-        Response resp = manyRepeat("FROM manylongs", strings);
-        assertManyStrings(resp, 30);
+        assertManyStrings(manyRepeat("FROM manylongs", strings), 30);
    }

    /**
@@ -406,7 +421,8 @@
     */
    public void testHugeManyRepeat() throws IOException {
        initManyLongs();
-        assertCircuitBreaks(() -> manyRepeat("FROM manylongs", 75));
+        // 75 is plenty to break on most nodes
+        assertCircuitBreaks(attempt -> manyRepeat("FROM manylongs", attempt * 75));
    }

    /**
@@ -414,15 +430,15 @@
     */
    public void testManyRepeatFromRow() throws IOException {
        int strings = 300;
-        Response resp = manyRepeat("ROW a = 99", strings);
-        assertManyStrings(resp, strings);
+        assertManyStrings(manyRepeat("ROW a = 99", strings), strings);
    }

    /**
     * Hits a circuit breaker by building many moderately long strings.
     */
    public void testHugeManyRepeatFromRow() throws IOException {
-        assertFoldCircuitBreaks(() -> manyRepeat("ROW a = 99", 400));
+        // 400 is enough to break on most nodes
+        assertFoldCircuitBreaks(attempt -> manyRepeat("ROW a = 99", attempt * 400));
    }

    /**
@@ -435,7 +451,7 @@
    /**
     * Tests that generate many moderately long strings.
     */
-    private Response manyRepeat(String init, int strings) throws IOException {
+    private Map manyRepeat(String init, int strings) throws IOException {
        StringBuilder query = startQuery();
        query.append(init).append(" | EVAL str = TO_STRING(a)");
        for (int s = 0; s < strings; s++) {
@@ -449,23 +465,21 @@
            query.append("str").append(s);
        }
        query.append("\"}");
-        return query(query.toString(), "columns");
+        return responseAsMap(query(query.toString(), "columns"));
    }

-    private void assertManyStrings(Response resp, int strings) throws IOException {
-        Map map = responseAsMap(resp);
+    private void assertManyStrings(Map resp, int strings) throws IOException {
        ListMatcher columns = matchesList();
        for (int s = 0; s < strings; s++) {
            columns = columns.item(matchesMap().entry("name", "str" + s).entry("type", "keyword"));
        }
        MapMatcher mapMatcher = matchesMap();
-        assertMap(map, mapMatcher.entry("columns", columns));
+        assertMap(resp, mapMatcher.entry("columns", columns));
    }

    public void testManyEval() throws IOException {
        initManyLongs();
-        Response resp = manyEval(1);
-        Map map = responseAsMap(resp);
+        Map response = manyEval(1);
        ListMatcher columns = matchesList();
        columns = columns.item(matchesMap().entry("name", "a").entry("type", "long"));
        columns = columns.item(matchesMap().entry("name", "b").entry("type", "long"));
@@ -475,15 +489,16 @@
        for (int i = 0; i < 20; i++) {
            columns = columns.item(matchesMap().entry("name", "i0" + i).entry("type", "long"));
        }
-        assertResultMap(map, columns, hasSize(10_000));
+        assertResultMap(response, columns, hasSize(10_000));
    }

    public void testTooManyEval() throws IOException {
        initManyLongs();
-        assertCircuitBreaks(() -> manyEval(490));
+        // 490 is plenty to fail on most nodes
+        assertCircuitBreaks(attempt -> manyEval(attempt * 490));
    }

-    private Response manyEval(int evalLines) throws IOException {
+    private Map manyEval(int evalLines) throws IOException {
        StringBuilder query = startQuery();
        query.append("FROM manylongs");
        for (int e = 0; e < evalLines; e++) {
@@ -496,7 +511,7 @@
            }
        }
        query.append("\n| LIMIT 10000\"}");
-        return query(query.toString(), null);
+        return responseAsMap(query(query.toString(), null));
    }

    private Response query(String query, String filterPath) throws IOException {
@@ -554,76 +569,75 @@
    public void testFetchManyBigFields() throws IOException {
        initManyBigFieldsIndex(100);
-        fetchManyBigFields(100);
+        Map response = fetchManyBigFields(100);
+        ListMatcher columns = matchesList();
+        for (int f = 0; f < 1000; f++) {
+            columns = columns.item(matchesMap().entry("name", "f" + String.format(Locale.ROOT, "%03d", f)).entry("type", "keyword"));
+        }
+        assertMap(response, matchesMap().entry("columns", columns));
    }

    public void testFetchTooManyBigFields() throws IOException {
        initManyBigFieldsIndex(500);
-        assertCircuitBreaks(() -> fetchManyBigFields(500));
+        // 500 docs is plenty to circuit break on most nodes
+        assertCircuitBreaks(attempt -> fetchManyBigFields(attempt * 500));
    }

    /**
     * Fetches documents containing 1000 fields which are {@code 1kb} each.
     */
-    private void fetchManyBigFields(int docs) throws IOException {
+    private Map fetchManyBigFields(int docs) throws IOException {
        StringBuilder query = startQuery();
        query.append("FROM manybigfields | SORT f000 | LIMIT " + docs + "\"}");
-        Response response = query(query.toString(), "columns");
-        Map map = responseAsMap(response);
-        ListMatcher columns = matchesList();
-        for (int f = 0; f < 1000; f++) {
-            columns = columns.item(matchesMap().entry("name", "f" + String.format(Locale.ROOT, "%03d", f)).entry("type", "keyword"));
-        }
-        assertMap(map, matchesMap().entry("columns", columns));
+        return responseAsMap(query(query.toString(), "columns"));
    }

    public void testAggMvLongs() throws IOException {
        int fieldValues = 100;
        initMvLongsIndex(1, 3, fieldValues);
-        Response response = aggMvLongs(3);
-        Map map = responseAsMap(response);
+        Map response = aggMvLongs(3);
        ListMatcher columns = matchesList().item(matchesMap().entry("name", "MAX(f00)").entry("type", "long"))
            .item(matchesMap().entry("name", "f00").entry("type", "long"))
            .item(matchesMap().entry("name", "f01").entry("type", "long"))
            .item(matchesMap().entry("name", "f02").entry("type", "long"));
-        assertMap(map, matchesMap().entry("columns", columns));
+        assertMap(response, matchesMap().entry("columns", columns));
    }

    public void testAggTooManyMvLongs() throws IOException {
        initMvLongsIndex(1, 3, 1000);
-        assertCircuitBreaks(() -> aggMvLongs(3));
+        // 3 fields is plenty on most nodes
+        assertCircuitBreaks(attempt -> aggMvLongs(attempt * 3));
    }

-    private Response aggMvLongs(int fields) throws IOException {
+    private Map aggMvLongs(int fields) throws IOException {
        StringBuilder query = startQuery();
        query.append("FROM mv_longs | STATS MAX(f00) BY f00");
        for (int f = 1; f < fields; f++) {
            query.append(", f").append(String.format(Locale.ROOT, "%02d", f));
        }
-        return query(query.append("\"}").toString(), "columns");
+        return responseAsMap(query(query.append("\"}").toString(), "columns"));
    }

    public void testFetchMvLongs() throws IOException {
        int fields = 100;
        initMvLongsIndex(100, fields, 1000);
-        Response response = fetchMvLongs();
-        Map map = responseAsMap(response);
+        Map response = fetchMvLongs();
        ListMatcher columns = matchesList();
        for (int f = 0; f < fields; f++) {
            columns = columns.item(matchesMap().entry("name", String.format(Locale.ROOT, "f%02d", f)).entry("type", "long"));
        }
-        assertMap(map, matchesMap().entry("columns", columns));
+        assertMap(response, matchesMap().entry("columns", columns));
    }

    public void testFetchTooManyMvLongs() throws IOException {
        initMvLongsIndex(500, 100, 1000);
-        assertCircuitBreaks(() -> fetchMvLongs());
+        assertCircuitBreaks(attempt -> fetchMvLongs());
    }

-    private Response fetchMvLongs() throws IOException {
+    private Map fetchMvLongs() throws IOException {
        StringBuilder query = startQuery();
        query.append("FROM mv_longs\"}");
-        return query(query.toString(), "columns");
+        return responseAsMap(query(query.toString(), "columns"));
    }

    public void testLookupExplosion() throws IOException {
@@ -634,11 +648,8 @@
    }

    public void testLookupExplosionManyMatches() throws IOException {
-        assertCircuitBreaks(() -> {
-            // 1500, 10000 is enough locally, but some CI machines need more.
-            Map result = lookupExplosion(2000, 10000);
-            logger.error("should have failed but got {}", result);
-        });
+        // 1500, 10000 is enough locally, but some CI machines need more.
+        assertCircuitBreaks(attempt -> lookupExplosion(attempt * 1500, 10000));
    }

    public void testLookupExplosionNoFetch() throws IOException {
@@ -649,10 +660,8 @@
    }

    public void testLookupExplosionNoFetchManyMatches() throws IOException {
-        assertCircuitBreaks(() -> {
-            Map result = lookupExplosionNoFetch(8500, 10000);
-            logger.error("should have failed but got {}", result);
-        });
+        // 8500 is plenty on most nodes
+        assertCircuitBreaks(attempt -> lookupExplosionNoFetch(attempt * 8500, 10000));
    }

    public void testLookupExplosionBigString() throws IOException {
@@ -663,25 +672,31 @@
    }

    public void testLookupExplosionBigStringManyMatches() throws IOException {
-        assertCircuitBreaks(() -> {
-            // 500, 1 is enough to make it fail locally but some CI needs more
-            Map result = lookupExplosionBigString(800, 1);
-            logger.error("should have failed but got {}", result);
-        });
+        // 500, 1 is enough to make it fail locally but some CI needs more
+        assertCircuitBreaks(attempt -> lookupExplosionBigString(attempt * 500, 1));
    }

-    private Map lookupExplosion(int sensorDataCount, int lookupEntries) throws IOException {
-        lookupExplosionData(sensorDataCount, lookupEntries);
-        StringBuilder query = startQuery();
-        query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(location)\"}");
-        return responseAsMap(query(query.toString(), null));
+    private Map lookupExplosion(int sensorDataCount, int lookupEntries) throws IOException {
+        try {
+            lookupExplosionData(sensorDataCount, lookupEntries);
+            StringBuilder query = startQuery();
+            query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(location)\"}");
+            return responseAsMap(query(query.toString(), null));
+        } finally {
+            deleteIndex("sensor_data");
+            deleteIndex("sensor_lookup");
+        }
    }

-    private Map lookupExplosionNoFetch(int sensorDataCount, int lookupEntries) throws IOException {
-        lookupExplosionData(sensorDataCount, lookupEntries);
-        StringBuilder query = startQuery();
-        query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(*)\"}");
-        return responseAsMap(query(query.toString(), null));
+    private Map lookupExplosionNoFetch(int sensorDataCount, int lookupEntries) throws IOException {
+        try {
+            lookupExplosionData(sensorDataCount, lookupEntries);
+            StringBuilder query = startQuery();
+            query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(*)\"}");
+            return responseAsMap(query(query.toString(), null));
+        } finally {
+            deleteIndex("sensor_data");
+            deleteIndex("sensor_lookup");
+        }
    }

    private void lookupExplosionData(int sensorDataCount, int lookupEntries) throws IOException {
@@ -689,20 +704,25 @@
        initSensorLookup(lookupEntries, 1, i -> "73.9857 40.7484");
    }

-    private Map lookupExplosionBigString(int sensorDataCount, int lookupEntries) throws IOException {
-        initSensorData(sensorDataCount, 1);
-        initSensorLookupString(lookupEntries, 1, i -> {
-            int target = Math.toIntExact(ByteSizeValue.ofMb(1).getBytes());
-            StringBuilder str = new StringBuilder(Math.toIntExact(ByteSizeValue.ofMb(2).getBytes()));
-            while (str.length() < target) {
-                str.append("Lorem ipsum dolor sit amet, consectetur adipiscing elit.");
-            }
-            logger.info("big string is {} characters", str.length());
-            return str.toString();
-        });
-        StringBuilder query = startQuery();
-        query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(string)\"}");
-        return responseAsMap(query(query.toString(), null));
+    private Map lookupExplosionBigString(int sensorDataCount, int lookupEntries) throws IOException {
+        try {
+            initSensorData(sensorDataCount, 1);
+            initSensorLookupString(lookupEntries, 1, i -> {
+                int target = Math.toIntExact(ByteSizeValue.ofMb(1).getBytes());
+                StringBuilder str = new StringBuilder(Math.toIntExact(ByteSizeValue.ofMb(2).getBytes()));
+                while (str.length() < target) {
+                    str.append("Lorem ipsum dolor sit amet, consectetur adipiscing elit.");
+                }
+                logger.info("big string is {} characters", str.length());
+                return str.toString();
+            });
+            StringBuilder query = startQuery();
+            query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(string)\"}");
+            return responseAsMap(query(query.toString(), null));
+        } finally {
+            deleteIndex("sensor_data");
+            deleteIndex("sensor_lookup");
+        }
    }

    public void testEnrichExplosion() throws IOException {
@@ -713,22 +733,25 @@
    }

    public void testEnrichExplosionManyMatches() throws IOException {
-        assertCircuitBreaks(() -> {
-            Map result = enrichExplosion(3000, 10000);
-            logger.error("should have failed but got {}", result);
-        });
+        // 1000, 5000 is enough on most nodes
+        assertCircuitBreaks(attempt -> enrichExplosion(1000, attempt * 5000));
    }

-    private Map enrichExplosion(int sensorDataCount, int lookupEntries) throws IOException {
-        initSensorData(sensorDataCount, 1);
-        initSensorEnrich(lookupEntries, 1, i -> "73.9857 40.7484");
+    private Map enrichExplosion(int sensorDataCount, int lookupEntries) throws IOException {
        try {
-            StringBuilder query = startQuery();
-            query.append("FROM sensor_data | ENRICH sensor ON id | STATS COUNT(*)\"}");
-            return responseAsMap(query(query.toString(), null));
+            initSensorData(sensorDataCount, 1);
+            initSensorEnrich(lookupEntries, 1, i -> "73.9857 40.7484");
+            try {
+                StringBuilder query = startQuery();
+                query.append("FROM sensor_data | ENRICH sensor ON id | STATS COUNT(*)\"}");
+                return responseAsMap(query(query.toString(), null));
+            } finally {
+                Request delete = new Request("DELETE", "/_enrich/policy/sensor");
+                assertMap(responseAsMap(client().performRequest(delete)), matchesMap().entry("acknowledged", true));
+            }
        } finally {
-            Request delete = new Request("DELETE", "/_enrich/policy/sensor");
-            assertMap(responseAsMap(client().performRequest(delete)), matchesMap().entry("acknowledged", true));
+            deleteIndex("sensor_data");
+            deleteIndex("sensor_lookup");
        }
    }
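The retry helper introduced above trades determinism for stability: the heap pressure needed to trip the breaker varies by node, so each failed attempt escalates the load by the attempt number instead of asserting a single fixed size. A rough standalone sketch of the pattern; the names and wrapper class are illustrative, not from the PR:

```java
// Illustrative only: escalate load until the expected failure occurs, up to a fixed cap.
public class EscalatingFailureDemo {
    interface Attempt {
        void run(int attempt) throws Exception;
    }

    static void assertEventuallyFails(Attempt attempt, int maxAttempts) throws Exception {
        for (int i = 1; i <= maxAttempts; i++) {
            try {
                attempt.run(i);            // e.g. sortByManyLongs(i * 5000): load grows each round
            } catch (Exception expected) {
                return;                    // the expected failure arrived; assert on its shape here
            }
        }
        throw new AssertionError("no failure after " + maxAttempts + " escalating attempts");
    }
}
```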
diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java
index b49d10ba9c40..c3384ede3a1a 100644
--- a/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java
@@ -94,7 +94,7 @@ public class DiskUsageIntegTestCase extends ESIntegTestCase {
    }

    public TestFileStore getTestFileStore(String nodeName) {
-        return fileSystemProvider.getTestFileStore(internalCluster().getInstance(Environment.class, nodeName).dataFiles()[0]);
+        return fileSystemProvider.getTestFileStore(internalCluster().getInstance(Environment.class, nodeName).dataDirs()[0]);
    }

    protected static class TestFileStore extends FilterFileStore {
diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java
index f6ed328d14dd..e7ff52353d4f 100644
--- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java
+++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java
@@ -679,7 +679,7 @@ public final class DataStreamTestHelper {
        ).build(MapperBuilderContext.root(false, true));
        ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool);
        Environment env = mock(Environment.class);
-        when(env.sharedDataFile()).thenReturn(null);
+        when(env.sharedDataDir()).thenReturn(null);
        AllocationService allocationService = mock(AllocationService.class);
        when(allocationService.reroute(any(ClusterState.class), any(String.class), any())).then(i -> i.getArguments()[0]);
        when(allocationService.getShardRoutingRoleStrategy()).thenReturn(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY);
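These hunks follow the same mechanical rename running through the whole PR: `Environment` accessors that return directories drop the misleading `File` suffix (`configFile()` becomes `configDir()`, `dataFiles()` becomes `dataDirs()`, `sharedDataFile()` becomes `sharedDataDir()`). For contrast, here is a hypothetical compatibility shim of the kind a staged migration might use; this is an assumption for illustration only, since the PR instead renames every call site directly:

```java
// Hypothetical shim, NOT part of this PR; shown only to contrast with the direct-rename approach.
public Path configDir() {
    return configDir;
}

@Deprecated // would be kept briefly so out-of-tree callers compile, then removed
public Path configFile() {
    return configDir();
}
```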
diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestCase.java
index 8f5478e1181f..db8a38c63c64 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestCase.java
@@ -13,82 +13,159 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
 import org.elasticsearch.logsdb.datageneration.DataGeneratorSpecification;
-import org.elasticsearch.logsdb.datageneration.FieldDataGenerator;
+import org.elasticsearch.logsdb.datageneration.DocumentGenerator;
 import org.elasticsearch.logsdb.datageneration.FieldType;
 import org.elasticsearch.logsdb.datageneration.MappingGenerator;
 import org.elasticsearch.logsdb.datageneration.Template;
 import org.elasticsearch.logsdb.datageneration.datasource.DataSourceHandler;
 import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest;
 import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse;
+import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
 import org.elasticsearch.search.fetch.StoredFieldsSpec;
 import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentType;

 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.stream.Stream;

 public abstract class BlockLoaderTestCase extends MapperServiceTestCase {
+    private final FieldType fieldType;
    private final String fieldName;
-    private final Template template;
    private final MappingGenerator mappingGenerator;
-    private final FieldDataGenerator generator;
+    private final DocumentGenerator documentGenerator;

    protected BlockLoaderTestCase(FieldType fieldType) {
+        this.fieldType = fieldType;
        this.fieldName = randomAlphaOfLengthBetween(5, 10);

        var specification = DataGeneratorSpecification.builder()
            .withFullyDynamicMapping(false)
+            // Disable dynamic mapping and disabled objects
            .withDataSourceHandlers(List.of(new DataSourceHandler() {
                @Override
                public DataSourceResponse.DynamicMappingGenerator handle(DataSourceRequest.DynamicMappingGenerator request) {
                    return new DataSourceResponse.DynamicMappingGenerator(isObject -> false);
                }
+
+                @Override
+                public DataSourceResponse.ObjectMappingParametersGenerator handle(
+                    DataSourceRequest.ObjectMappingParametersGenerator request
+                ) {
+                    return new DataSourceResponse.ObjectMappingParametersGenerator(HashMap::new); // just defaults
+                }
            }))
            .build();

-        this.template = new Template(Map.of(fieldName, new Template.Leaf(fieldName, fieldType)));
        this.mappingGenerator = new MappingGenerator(specification);
-        this.generator = fieldType.generator(fieldName, specification.dataSource());
+        this.documentGenerator = new DocumentGenerator(specification);
    }

    public void testBlockLoader() throws IOException {
+        var template = new Template(Map.of(fieldName, new Template.Leaf(fieldName, fieldType)));
+        runTest(template, fieldName);
+    }
+
+    public void testBlockLoaderForFieldInObject() throws IOException {
+        int depth = randomIntBetween(0, 3);
+
+        Map currentLevel = new HashMap<>();
+        Map top = Map.of("top", new Template.Object("top", false, currentLevel));
+
+        var fullFieldName = new StringBuilder("top");
+        int currentDepth = 0;
+        while (currentDepth++ < depth) {
+            fullFieldName.append('.').append("level").append(currentDepth);
+
+            Map nextLevel = new HashMap<>();
+            currentLevel.put("level" + currentDepth, new Template.Object("level" + currentDepth, false, nextLevel));
+            currentLevel = nextLevel;
+        }
+
+        fullFieldName.append('.').append(fieldName);
+        currentLevel.put(fieldName, new Template.Leaf(fieldName, fieldType));
+        var template = new Template(top);
+        runTest(template, fullFieldName.toString());
+    }
+
+    private void runTest(Template template, String fieldName) throws IOException {
        var mapping = mappingGenerator.generate(template);
        var mappingXContent = XContentBuilder.builder(XContentType.JSON.xContent()).map(mapping.raw());

        var syntheticSource = randomBoolean();
        var mapperService = syntheticSource ? createSytheticSourceMapperService(mappingXContent) : createMapperService(mappingXContent);

-        var fieldValue = generator.generateValue();
+        var document = documentGenerator.generate(template, mapping);
+        var documentXContent = XContentBuilder.builder(XContentType.JSON.xContent()).map(document);

-        Object blockLoaderResult = setupAndInvokeBlockLoader(mapperService, fieldValue);
-        Object expected = expected(mapping.lookup().get(fieldName), fieldValue, syntheticSource);
+        Object blockLoaderResult = setupAndInvokeBlockLoader(mapperService, documentXContent, fieldName);
+        Object expected = expected(mapping.lookup().get(fieldName), getFieldValue(document, fieldName), syntheticSource);
        assertEquals(expected, blockLoaderResult);
    }

    protected abstract Object expected(Map fieldMapping, Object value, boolean syntheticSource);

-    private Object setupAndInvokeBlockLoader(MapperService mapperService, Object fieldValue) throws IOException {
+    private Object getFieldValue(Map document, String fieldName) {
+        var rawValues = new ArrayList<>();
+        processLevel(document, fieldName, rawValues);
+
+        if (rawValues.size() == 1) {
+            return rawValues.get(0);
+        }
+
+        return rawValues.stream().flatMap(v -> v instanceof List l ? l.stream() : Stream.of(v)).toList();
+    }
+
+    @SuppressWarnings("unchecked")
+    private void processLevel(Map level, String field, ArrayList values) {
+        if (field.contains(".") == false) {
+            var value = level.get(field);
+            values.add(value);
+            return;
+        }
+
+        var nameInLevel = field.split("\\.")[0];
+        var entry = level.get(nameInLevel);
+        if (entry instanceof Map m) {
+            processLevel((Map) m, field.substring(field.indexOf('.') + 1), values);
+        }
+        if (entry instanceof List l) {
+            for (var object : l) {
+                processLevel((Map) object, field.substring(field.indexOf('.') + 1), values);
+            }
+        }
+    }
+
+    private Object setupAndInvokeBlockLoader(MapperService mapperService, XContentBuilder document, String fieldName) throws IOException {
        try (Directory directory = newDirectory()) {
            RandomIndexWriter iw = new RandomIndexWriter(random(), directory);

-            LuceneDocument doc = mapperService.documentMapper().parse(source(b -> {
-                b.field(fieldName);
-                b.value(fieldValue);
-            })).rootDoc();
+            var source = new SourceToParse(
+                "1",
+                BytesReference.bytes(document),
+                XContentType.JSON,
+                null,
+                Map.of(),
+                true,
+                XContentMeteringParserDecorator.NOOP
+            );
+            LuceneDocument doc = mapperService.documentMapper().parse(source).rootDoc();

            iw.addDocument(doc);
            iw.close();

            try (DirectoryReader reader = DirectoryReader.open(directory)) {
                LeafReaderContext context = reader.leaves().get(0);
-                return load(createBlockLoader(mapperService), context, mapperService);
+                return load(createBlockLoader(mapperService, fieldName), context, mapperService);
            }
        }
    }
@@ -98,6 +175,9 @@ public abstract class BlockLoaderTestCase extends MapperServiceTestCase {
        var columnAtATimeReader = blockLoader.columnAtATimeReader(context);
        if (columnAtATimeReader != null) {
            var block = (TestBlock) columnAtATimeReader.read(TestBlock.factory(context.reader().numDocs()), TestBlock.docs(0));
+            if (block.size() == 0) {
+                return null;
+            }
            return block.get(0);
        }
@@ -119,10 +199,13 @@
        BlockLoader.Builder builder = blockLoader.builder(TestBlock.factory(context.reader().numDocs()), 1);
        blockLoader.rowStrideReader(context).read(0, storedFieldsLoader, builder);
        var block = (TestBlock) builder.build();
+        if (block.size() == 0) {
+            return null;
+        }
        return block.get(0);
    }

-    private BlockLoader createBlockLoader(MapperService mapperService) {
+    private BlockLoader createBlockLoader(MapperService mapperService, String fieldName) {
        SearchLookup searchLookup = new SearchLookup(mapperService.mappingLookup().fieldTypesLookup()::get, null, null);

        return mapperService.fieldType(fieldName).blockLoader(new MappedFieldType.BlockLoaderContext() {
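The `getFieldValue`/`processLevel` pair above resolves a dotted path against the generated document, descending through nested maps and fanning out across arrays of objects. A self-contained sketch of the same traversal, so the flattening behavior is easy to see; this is an illustrative re-implementation, not the test framework's code:

```java
import java.util.List;
import java.util.Map;

public class DottedPathDemo {
    // Collect every value reachable via the dotted path, descending into maps
    // and fanning out over lists of objects along the way.
    static void collect(Map<?, ?> level, String path, List<Object> out) {
        int dot = path.indexOf('.');
        if (dot < 0) {
            out.add(level.get(path));
            return;
        }
        Object entry = level.get(path.substring(0, dot));
        String rest = path.substring(dot + 1);
        if (entry instanceof Map<?, ?> m) {
            collect(m, rest, out);
        } else if (entry instanceof List<?> l) {
            for (Object o : l) {
                collect((Map<?, ?>) o, rest, out);
            }
        }
    }

    public static void main(String[] args) {
        Map<String, Object> doc = Map.of("top", List.of(Map.of("f", 1), Map.of("f", 2)));
        List<Object> values = new java.util.ArrayList<>();
        collect(doc, "top.f", values);
        System.out.println(values); // [1, 2]: one value per object in the array
    }
}
```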
diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java
index 459480d1d731..b62e40082683 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java
@@ -884,8 +884,11 @@
        throws IOException {
        assertReaderEquals(
            "round trip " + syntheticSource,
-            new FieldMaskingReader(SourceFieldMapper.RECOVERY_SOURCE_NAME, reader),
-            new FieldMaskingReader(SourceFieldMapper.RECOVERY_SOURCE_NAME, roundTripReader)
+            new FieldMaskingReader(Set.of(SourceFieldMapper.RECOVERY_SOURCE_NAME, SourceFieldMapper.RECOVERY_SOURCE_SIZE_NAME), reader),
+            new FieldMaskingReader(
+                Set.of(SourceFieldMapper.RECOVERY_SOURCE_NAME, SourceFieldMapper.RECOVERY_SOURCE_SIZE_NAME),
+                roundTripReader
+            )
        );
    }
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java
index 2567037488f3..702145dd9a50 100644
--- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java
@@ -64,6 +64,9 @@ public class DefaultMappingParametersHandler implements DataSourceHandler {
            if (ESTestCase.randomDouble() <= 0.2) {
                injected.put("ignore_above", ESTestCase.randomIntBetween(1, 100));
            }
+            if (ESTestCase.randomDouble() <= 0.2) {
+                injected.put("null_value", ESTestCase.randomAlphaOfLengthBetween(0, 10));
+            }

            return injected;
        };
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java
index f86eb31f47cc..df26b652a806 100644
--- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java
@@ -288,4 +288,63 @@ interface FieldSpecificMatcher {
            );
        }
    }
+
+    class KeywordMatcher implements FieldSpecificMatcher {
+        private final XContentBuilder actualMappings;
+        private final Settings.Builder actualSettings;
+        private final XContentBuilder expectedMappings;
+        private final Settings.Builder expectedSettings;
+
+        KeywordMatcher(
+            XContentBuilder actualMappings,
+            Settings.Builder actualSettings,
+            XContentBuilder expectedMappings,
+            Settings.Builder expectedSettings
+        ) {
+            this.actualMappings = actualMappings;
+            this.actualSettings = actualSettings;
+            this.expectedMappings = expectedMappings;
+            this.expectedSettings = expectedSettings;
+        }
+
+        @Override
+        public MatchResult match(
+            List actual,
+            List expected,
+            Map actualMapping,
+            Map expectedMapping
+        ) {
+            var nullValue = actualMapping.get("null_value");
+            var expectedNullValue = expectedMapping.get("null_value");
+            if (Objects.equals(nullValue, expectedNullValue) == false) {
+                throw new IllegalStateException(
+                    "[null_value] parameter for [keyword] field does not match between actual and expected mapping"
+                );
+            }
+
+            var expectedNormalized = normalize(expected, (String) nullValue);
+            var actualNormalized = normalize(actual, (String) nullValue);
+
+            return actualNormalized.equals(expectedNormalized)
+                ? MatchResult.match()
+                : MatchResult.noMatch(
+                    formatErrorMessage(
+                        actualMappings,
+                        actualSettings,
+                        expectedMappings,
+                        expectedSettings,
+                        "Values of type [keyword] don't match after normalization, normalized "
+                            + prettyPrintCollections(actualNormalized, expectedNormalized)
+                    )
+                );
+        }
+
+        private static Set normalize(List values, String nullValue) {
+            if (values == null) {
+                return Set.of();
+            }
+
+            return values.stream().map(v -> v == null ? nullValue : (String) v).filter(Objects::nonNull).collect(Collectors.toSet());
+        }
+    }
 }
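The normalization step is what makes `null_value` comparisons line up: nulls in either list are replaced by the mapping's `null_value` when one is set, any remaining nulls are dropped, and order and duplicates are ignored by collecting to a set. A worked example of that semantics, under the same assumptions as the matcher above:

```java
import java.util.*;
import java.util.stream.Collectors;

class NormalizeDemo {
    // Same semantics as KeywordMatcher.normalize above: substitute null_value,
    // drop remaining nulls, compare as an order- and duplicate-insensitive set.
    static Set<String> normalize(List<String> values, String nullValue) {
        if (values == null) return Set.of();
        return values.stream()
            .map(v -> v == null ? nullValue : v)
            .filter(Objects::nonNull)
            .collect(Collectors.toSet());
    }

    public static void main(String[] args) {
        // With null_value = "N/A", [null, "a", "a"] and ["N/A", "a"] normalize identically
        // (set printing order may vary).
        System.out.println(normalize(Arrays.asList(null, "a", "a"), "N/A"));
        System.out.println(normalize(List.of("N/A", "a"), "N/A"));
    }
}
```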
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java
index 96b8824b76af..57c7a92bfa55 100644
--- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java
@@ -59,7 +59,9 @@ public class SourceMatcher extends GenericEqualsMatcher
            "unsigned_long",
            new FieldSpecificMatcher.UnsignedLongMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings),
            "counted_keyword",
-            new FieldSpecificMatcher.CountedKeywordMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings)
+            new FieldSpecificMatcher.CountedKeywordMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings),
+            "keyword",
+            new FieldSpecificMatcher.KeywordMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings)
        );
        this.dynamicFieldMatcher = new DynamicFieldMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings);
    }
diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java
index 0a4c99eb8b52..74db1147f23b 100644
--- a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java
+++ b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java
@@ -42,16 +42,12 @@ public class MockPluginsService extends PluginsService {
     * @param classpathPlugins Plugins that exist in the classpath which should be loaded
     */
    public MockPluginsService(Settings settings, Environment environment, Collection<Class<? extends Plugin>> classpathPlugins) {
-        super(
-            settings,
-            environment.configFile(),
-            new PluginsLoader(Collections.emptySet(), Collections.emptySet(), Collections.emptyMap())
-        );
+        super(settings, environment.configDir(), new PluginsLoader(Collections.emptySet(), Collections.emptySet(), Collections.emptyMap()));

        List pluginsLoaded = new ArrayList<>();

        for (Class pluginClass : classpathPlugins) {
-            Plugin plugin = loadPlugin(pluginClass, settings, environment.configFile());
+            Plugin plugin = loadPlugin(pluginClass, settings, environment.configDir());
            PluginDescriptor pluginInfo = new PluginDescriptor(
                pluginClass.getName(),
                "classpath plugin",
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
index 3bb6dd20133e..e9f5e0cdc172 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
@@ -2288,7 +2288,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
     */
    public static Path randomRepoPath(Settings settings) {
        Environment environment = TestEnvironment.newEnvironment(settings);
-        Path[] repoFiles = environment.repoFiles();
+        Path[] repoFiles = environment.repoDirs();
        assert repoFiles.length > 0;
        Path path;
        do {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
index 7dd702244e90..6bc1833e1036 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
@@ -1808,7 +1808,7 @@ public final class InternalTestCluster extends TestCluster {
            .distinct()
            .collect(Collectors.toList());
        Set configPaths = Stream.concat(currentNodes.stream(), newNodes.stream())
-            .map(nac -> nac.node.getEnvironment().configFile())
+            .map(nac -> nac.node.getEnvironment().configDir())
            .collect(Collectors.toSet());
        logger.debug("configuring discovery with {} at {}", discoveryFileContents, configPaths);
        for (final Path configPath : configPaths) {
@@ -1822,7 +1822,7 @@
    }

    public Collection configPaths() {
-        return nodes.values().stream().map(nac -> nac.node.getEnvironment().configFile()).toList();
+        return nodes.values().stream().map(nac -> nac.node.getEnvironment().configDir()).toList();
    }

    private void stopNodesAndClient(NodeAndClient nodeAndClient) throws IOException {
diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
index 4595fbf28607..0df978fe4937 100644
--- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
@@ -2758,8 +2758,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
            TransportStats transportStats = serviceC.transport.getStats(); // we did a single round-trip to do the initial handshake
            assertEquals(1, transportStats.getRxCount());
            assertEquals(1, transportStats.getTxCount());
-            assertEquals(29, transportStats.getRxSize().getBytes());
-            assertEquals(55, transportStats.getTxSize().getBytes());
+            assertEquals(35, transportStats.getRxSize().getBytes());
+            assertEquals(60, transportStats.getTxSize().getBytes());
        });
        serviceC.sendRequest(
            connection,
@@ -2773,16 +2773,16 @@
            TransportStats transportStats = serviceC.transport.getStats(); // request has been send
            assertEquals(1, transportStats.getRxCount());
            assertEquals(2, transportStats.getTxCount());
-            assertEquals(29, transportStats.getRxSize().getBytes());
-            assertEquals(114, transportStats.getTxSize().getBytes());
+            assertEquals(35, transportStats.getRxSize().getBytes());
+            assertEquals(119, transportStats.getTxSize().getBytes());
        });
        sendResponseLatch.countDown();
        responseLatch.await();
        stats = serviceC.transport.getStats(); // response has been received
        assertEquals(2, stats.getRxCount());
        assertEquals(2, stats.getTxCount());
-        assertEquals(54, stats.getRxSize().getBytes());
-        assertEquals(114, stats.getTxSize().getBytes());
+        assertEquals(60, stats.getRxSize().getBytes());
+        assertEquals(119, stats.getTxSize().getBytes());
    } finally {
        serviceC.close();
    }
@@ -2873,8 +2873,8 @@
            TransportStats transportStats = serviceC.transport.getStats(); // request has been sent
            assertEquals(1, transportStats.getRxCount());
            assertEquals(1, transportStats.getTxCount());
-            assertEquals(29, transportStats.getRxSize().getBytes());
-            assertEquals(55, transportStats.getTxSize().getBytes());
+            assertEquals(35, transportStats.getRxSize().getBytes());
+            assertEquals(60, transportStats.getTxSize().getBytes());
        });
        serviceC.sendRequest(
            connection,
@@ -2888,8 +2888,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
            TransportStats transportStats = serviceC.transport.getStats(); // request has been sent
            assertEquals(1, transportStats.getRxCount());
            assertEquals(2, transportStats.getTxCount());
-            assertEquals(29, transportStats.getRxSize().getBytes());
-            assertEquals(114, transportStats.getTxSize().getBytes());
+            assertEquals(35, transportStats.getRxSize().getBytes());
+            assertEquals(119, transportStats.getTxSize().getBytes());
        });
        sendResponseLatch.countDown();
        responseLatch.await();
@@ -2904,8 +2904,8 @@
            String failedMessage = "Unexpected read bytes size. The transport exception that was received=" + exception;
            // 57 bytes are the non-exception message bytes that have been received. It should include the initial
            // handshake message and the header, version, etc bytes in the exception message.
-            assertEquals(failedMessage, 57 + streamOutput.bytes().length(), stats.getRxSize().getBytes());
-            assertEquals(114, stats.getTxSize().getBytes());
+            assertEquals(failedMessage, 63 + streamOutput.bytes().length(), stats.getRxSize().getBytes());
+            assertEquals(119, stats.getTxSize().getBytes());
        } finally {
            serviceC.close();
        }
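A quick consistency check on the updated constants: every rx expectation grows by exactly 6 bytes (29 to 35, 54 to 60, 57 to 63) and every tx expectation by exactly 5 (55 to 60, 114 to 119), which is consistent with the handshake messages growing by a fixed amount while the later request and response payloads are unchanged. The deltas come straight from the assertions above; the attribution to handshake growth is an inference from the surrounding test comments:

```java
public class HandshakeSizeDeltas {
    public static void main(String[] args) {
        // rx deltas are all 6; tx deltas are all 5.
        System.out.println((35 - 29) + " " + (60 - 54) + " " + (63 - 57)); // 6 6 6
        System.out.println((60 - 55) + " " + (119 - 114));                // 5 5
    }
}
```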
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportForgetFollowerAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportForgetFollowerAction.java
index 796d7a413997..cb5c6f998575 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportForgetFollowerAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportForgetFollowerAction.java
@@ -16,7 +16,6 @@ import org.elasticsearch.action.support.replication.ReplicationResponse;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.PlainShardsIterator;
 import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
@@ -141,8 +140,7 @@ public class TransportForgetFollowerAction extends TransportBroadcastByNodeAction
        final ForgetFollowerAction.Request request,
        final String[] concreteIndices
    ) {
-        final GroupShardsIterator activePrimaryShards = clusterState.routingTable()
-            .activePrimaryShardsGrouped(concreteIndices, false);
+        final List activePrimaryShards = clusterState.routingTable().activePrimaryShardsGrouped(concreteIndices, false);
        final List shardRoutings = new ArrayList<>();
        final Iterator it = activePrimaryShards.iterator();
        while (it.hasNext()) {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java
index 58ff9c65dcb8..d8503033ef3a 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java
@@ -411,9 +411,9 @@ public class XPackPlugin extends XPackClientPlugin
    }

    public static Path resolveConfigFile(Environment env, String name) {
-        Path config = env.configFile().resolve(name);
+        Path config = env.configDir().resolve(name);
        if (Files.exists(config) == false) {
-            Path legacyConfig = env.configFile().resolve("x-pack").resolve(name);
+            Path legacyConfig = env.configDir().resolve("x-pack").resolve(name);
            if (Files.exists(legacyConfig)) {
                deprecationLogger.warn(
                    DeprecationCategory.OTHER,
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/UnifiedChatCompletionException.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/UnifiedChatCompletionException.java
new file mode 100644
index 000000000000..f2844e6534a9
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/UnifiedChatCompletionException.java
@@ -0,0 +1,117 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.inference.results;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.xcontent.ToXContent;
+
+import java.util.Iterator;
+import java.util.Locale;
+import java.util.Objects;
+
+import static java.util.Collections.emptyIterator;
+import static org.elasticsearch.ExceptionsHelper.maybeError;
+import static org.elasticsearch.common.collect.Iterators.concat;
+import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.endObject;
+import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.startObject;
+
+public class UnifiedChatCompletionException extends XContentFormattedException {
+
+    private static final Logger log = LogManager.getLogger(UnifiedChatCompletionException.class);
+    private final String message;
+    private final String type;
+    @Nullable
+    private final String code;
+    @Nullable
+    private final String param;
+
+    public UnifiedChatCompletionException(RestStatus status, String message, String type, @Nullable String code) {
+        this(status, message, type, code, null);
+    }
+
+    public UnifiedChatCompletionException(RestStatus status, String message, String type, @Nullable String code, @Nullable String param) {
+        super(message, status);
+        this.message = Objects.requireNonNull(message);
+        this.type = Objects.requireNonNull(type);
+        this.code = code;
+        this.param = param;
+    }
+
+    public UnifiedChatCompletionException(
+        Throwable cause,
+        RestStatus status,
+        String message,
+        String type,
+        @Nullable String code,
+        @Nullable String param
+    ) {
+        super(message, cause, status);
+        this.message = Objects.requireNonNull(message);
+        this.type = Objects.requireNonNull(type);
+        this.code = code;
+        this.param = param;
+    }
+
+    @Override
+    public Iterator toXContentChunked(Params params) {
+        return concat(
+            startObject(),
+            startObject("error"),
+            optionalField("code", code),
+            field("message", message),
+            optionalField("param", param),
+            field("type", type),
+            endObject(),
+            endObject()
+        );
+    }
+
+    private static Iterator field(String key, String value) {
+        return ChunkedToXContentHelper.chunk((b, p) -> b.field(key, value));
+    }
+
+    private static Iterator optionalField(String key, String value) {
+        return value != null ? ChunkedToXContentHelper.chunk((b, p) -> b.field(key, value)) : emptyIterator();
+    }
+
+    public static UnifiedChatCompletionException fromThrowable(Throwable t) {
+        if (ExceptionsHelper.unwrapCause(t) instanceof UnifiedChatCompletionException e) {
+            return e;
+        } else {
+            return maybeError(t).map(error -> {
+                // we should never be throwing Error, but just in case we are, rethrow it on another thread so the JVM can handle it and
+                // return a vague error to the user so that they at least see something went wrong but don't leak JVM details to users
+                ExceptionsHelper.maybeDieOnAnotherThread(error);
+                var e = new RuntimeException("Fatal error while streaming response. Please retry the request.");
+                log.error(e.getMessage(), t);
+                return new UnifiedChatCompletionException(
+                    RestStatus.INTERNAL_SERVER_ERROR,
+                    e.getMessage(),
+                    getExceptionName(e),
+                    RestStatus.INTERNAL_SERVER_ERROR.name().toLowerCase(Locale.ROOT)
+                );
+            }).orElseGet(() -> {
+                log.atDebug().withThrowable(t).log("UnifiedChatCompletionException stack trace for debugging purposes.");
+                var status = ExceptionsHelper.status(t);
+                return new UnifiedChatCompletionException(
+                    t,
+                    status,
+                    t.getMessage(),
+                    getExceptionName(t),
+                    status.name().toLowerCase(Locale.ROOT),
+                    null
+                );
+            });
+        }
+    }
+}
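Given `toXContentChunked` above, streaming one of these exceptions renders an OpenAI-style error envelope in which null `code`/`param` fields are simply omitted. A hedged usage sketch; the handler wiring and the exact rendered field values are illustrative, not taken from this PR:

```java
// Illustrative only: fromThrowable funnels an arbitrary failure into the unified shape.
UnifiedChatCompletionException e = UnifiedChatCompletionException.fromThrowable(
    new IllegalArgumentException("model not found")
);
// Serializing e.toXContentChunked(params) should yield approximately:
//   {"error":{"code":"bad_request","message":"model not found","type":"illegal_argument_exception"}}
// with "param" omitted because it is null, and the REST status taken from
// ExceptionsHelper.status(t) via the status() override (400 here).
```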
+ */
+public class XContentFormattedException extends ElasticsearchException implements ChunkedToXContent {
+
+    public static final String X_CONTENT_PARAM = "detailedErrorsEnabled";
+    private final RestStatus status;
+    private final Throwable cause;
+
+    public XContentFormattedException(String message, RestStatus status) {
+        super(message);
+        this.status = Objects.requireNonNull(status);
+        this.cause = null;
+    }
+
+    public XContentFormattedException(Throwable cause, RestStatus status) {
+        super(cause);
+        this.status = Objects.requireNonNull(status);
+        this.cause = cause;
+    }
+
+    public XContentFormattedException(String message, Throwable cause, RestStatus status) {
+        super(message, cause);
+        this.status = Objects.requireNonNull(status);
+        this.cause = cause;
+    }
+
+    @Override
+    public RestStatus status() {
+        return status;
+    }
+
+    @Override
+    public Iterator toXContentChunked(Params params) {
+        return Iterators.concat(
+            ChunkedToXContentHelper.startObject(),
+            Iterators.single(
+                (b, p) -> ElasticsearchException.generateFailureXContent(
+                    b,
+                    p,
+                    cause instanceof Exception e ? e : this,
+                    params.paramAsBoolean(X_CONTENT_PARAM, false)
+                )
+            ),
+            Iterators.single((b, p) -> b.field("status", status.getStatus())),
+            ChunkedToXContentHelper.endObject()
+        );
+    }
+
+    @Override
+    public Iterator toXContentChunked(RestApiVersion restApiVersion, Params params) {
+        return ChunkedToXContent.super.toXContentChunked(restApiVersion, params);
+    }
+
+    @Override
+    public Iterator toXContentChunkedV8(Params params) {
+        return ChunkedToXContent.super.toXContentChunkedV8(params);
+    }
+
+    @Override
+    public boolean isFragment() {
+        return super.isFragment();
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java
index 1229d62dce04..a704b350dba4 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java
@@ -225,6 +225,7 @@ public class InternalUsers {
                    .build() },
                null,
                null,
+                new String[] {},
                MetadataUtils.DEFAULT_RESERVED_METADATA,
                Map.of()
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java
index 6424136eb1a7..9a35b8f13d4c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java
@@ -146,7 +146,7 @@ public class CertParsingUtils {
        boolean acceptNonSecurePasswords
    ) {
        final SslSettingsLoader settingsLoader = new SslSettingsLoader(settings, prefix, acceptNonSecurePasswords);
-        return settingsLoader.buildKeyConfig(environment.configFile());
+        return settingsLoader.buildKeyConfig(environment.configDir());
    }

    /**
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java
index cb55de79342b..f9b27daa8f8c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java
@@ -128,7 +128,7 @@ public final class SslSettingsLoader extends SslConfigurationLoader {
    }

    public SslConfiguration load(Environment env) {
-        return load(env.configFile());
+        return load(env.configDir());
    }

    public static SslConfiguration load(Settings settings, String prefix, Environment env) {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java
index beeded209532..6fdc739e1f89 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java
@@ -26,7 +26,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.project.ProjectResolver;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.service.ClusterService;
@@ -209,7 +208,7 @@ public class TransportTermsEnumAction extends HandledTransportAction
-        GroupShardsIterator shards = clusterService.operationRouting().searchShards(project, singleIndex, null, null);
+        List shards = clusterService.operationRouting().searchShards(project, singleIndex, null, null);
        for (ShardIterator copiesOfShard : shards) {
            ShardRouting selectedCopyOfShard = null;
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java
index ab6e7356a6e0..f7432a59040d 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java
@@ -143,7 +143,7 @@ public class XPackPluginTests extends ESTestCase {

        Environment mockEnvironment = mock(Environment.class);
        when(mockEnvironment.settings()).thenReturn(Settings.builder().build());
-        when(mockEnvironment.configFile()).thenReturn(PathUtils.get(""));
+        when(mockEnvironment.configDir()).thenReturn(PathUtils.get(""));
        // ensure createComponents does not influence the results
        Plugin.PluginServices services = mock(Plugin.PluginServices.class);
        when(services.clusterService()).thenReturn(mock(ClusterService.class));
@@ -187,7 +187,7 @@
        });
        Environment mockEnvironment = mock(Environment.class);
        when(mockEnvironment.settings()).thenReturn(Settings.builder().build());
-        when(mockEnvironment.configFile()).thenReturn(PathUtils.get(""));
+        when(mockEnvironment.configDir()).thenReturn(PathUtils.get(""));
        Plugin.PluginServices services = mock(Plugin.PluginServices.class);
        when(services.clusterService()).thenReturn(mock(ClusterService.class));
        when(services.threadPool()).thenReturn(mock(ThreadPool.class));
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForSnapshotStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForSnapshotStepTests.java
index ed1cb477c30e..6e31759fd6c2 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForSnapshotStepTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForSnapshotStepTests.java
@@ -164,7 +164,6 @@ public class WaitForSnapshotStepTests extends AbstractStepTestCase
[] { configurationSettings.x509KeyPair.legacyKeystorePassword });
    }
@@ -263,7 +263,7 @@ public class SslSettingsLoaderTests extends ESTestCase {
        StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig();
        assertThat(
            ksKeyInfo,
-            equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile()))
+            equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir()))
        );
    }
@@ -279,7 +279,7 @@
        StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig();
        assertThat(
            ksKeyInfo,
-            equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile()))
+            equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir()))
        );
        assertSettingDeprecationsAndWarnings(
            new Setting[] {
@@ -298,7 +298,7 @@
        StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig();
        assertThat(
            ksKeyInfo,
-            equalTo(new StoreKeyConfig("xpack/tls/path.jks", PASSWORD, "jks", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile()))
+            equalTo(new StoreKeyConfig("xpack/tls/path.jks", PASSWORD, "jks", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir()))
        );
    }
@@ -314,7 +314,7 @@
        StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig();
        assertThat(
            ksKeyInfo,
-            equalTo(new StoreKeyConfig(path, PASSWORD, "PKCS12", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile()))
+            equalTo(new StoreKeyConfig(path, PASSWORD, "PKCS12", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir()))
        );
    }
@@ -328,7 +328,7 @@
        StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig();
        assertThat(
            ksKeyInfo,
-            equalTo(new StoreKeyConfig("xpack/tls/path.foo", PASSWORD, "jks", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile()))
+            equalTo(new StoreKeyConfig("xpack/tls/path.foo", PASSWORD, "jks", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir()))
        );
    }
@@ -347,10 +347,7 @@
        SslConfiguration sslConfiguration = getSslConfiguration(settings);
        assertThat(sslConfiguration.keyConfig(), instanceOf(StoreKeyConfig.class));
        StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig();
-        assertThat(
-            ksKeyInfo,
-            equalTo(new StoreKeyConfig(path, PASSWORD, type, null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile()))
-        );
+        assertThat(ksKeyInfo, equalTo(new StoreKeyConfig(path, PASSWORD, type, null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir())));
    }

    public void testThatEmptySettingsAreEqual() {
diff --git a/x-pack/plugin/core/template-resources/src/main/resources/reindex-data-stream-pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/reindex-data-stream-pipeline.json
new file mode 100644
index 000000000000..e8c335213170
--- /dev/null
+++ b/x-pack/plugin/core/template-resources/src/main/resources/reindex-data-stream-pipeline.json
@@ -0,0 +1,16 @@
+{
+  "description": "This pipeline sanitizes documents that are being reindexed into a data stream using the reindex data stream API.
It is an internal pipeline and should not be modified.", + "processors": [ + { + "set": { + "field": "@timestamp", + "value": 0, + "override": false + } + } + ], + "_meta": { + "managed": true + }, + "version": ${xpack.migrate.reindex.pipeline.version} +} diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java index e4ddae133a09..64027e9cbb4d 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.project.ProjectResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -37,6 +36,7 @@ import org.elasticsearch.xpack.core.downsample.DownsampleShardTask; import java.io.IOException; import java.util.Arrays; +import java.util.List; import java.util.concurrent.atomic.AtomicReferenceArray; /** @@ -83,18 +83,13 @@ public class TransportDownsampleIndexerAction extends TransportBroadcastAction< } @Override - protected GroupShardsIterator shards( - ClusterState clusterState, - DownsampleIndexerAction.Request request, - String[] concreteIndices - ) { + protected List shards(ClusterState clusterState, DownsampleIndexerAction.Request request, String[] concreteIndices) { if (concreteIndices.length > 1) { throw new IllegalArgumentException("multiple indices: " + Arrays.toString(concreteIndices)); } ProjectState project = projectResolver.getProjectState(clusterState); - final GroupShardsIterator groups = clusterService.operationRouting() - .searchShards(project, concreteIndices, null, null); + final List groups = clusterService.operationRouting().searchShards(project, concreteIndices, null, null); for (ShardIterator group : groups) { // fails fast if any non-active groups if (group.size() == 0) { diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java index a49d38a019bf..c273c39d216f 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java @@ -26,10 +26,8 @@ import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ProjectState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.project.ProjectResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.Preference; -import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.cluster.service.ClusterService; import 
org.elasticsearch.common.Strings; @@ -219,10 +217,9 @@ public class EnrichShardMultiSearchAction extends ActionType result = clusterService.operationRouting() - .searchShards(project, new String[] { index }, null, Preference.LOCAL.type()); - return result.get(0); + return clusterService.operationRouting() + .searchShards(project, new String[] { index }, null, Preference.LOCAL.type()) + .getFirst(); } @Override diff --git a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Aggregator.java b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Aggregator.java index 444dbcc1b9e5..794baf175920 100644 --- a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Aggregator.java +++ b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Aggregator.java @@ -37,11 +37,6 @@ import java.lang.annotation.Target; * are ever collected. *
</p>
 * <p>
- * The generation code will also look for a method called {@code combineValueCount}
- * which is called once per received block with a count of values. NOTE: We may
- * not need this after we convert AVG into a composite operation.
- * </p>
- * <p>
* The generation code also looks for the optional methods {@code combineIntermediate} * and {@code evaluateFinal} which are used to combine intermediate states and * produce the final output. If the first is missing then the generated code will diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java index 46881bf337c8..c62dc9ed24d8 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java @@ -78,7 +78,6 @@ public class AggregatorImplementer { private final List warnExceptions; private final ExecutableElement init; private final ExecutableElement combine; - private final ExecutableElement combineValueCount; private final ExecutableElement combineIntermediate; private final ExecutableElement evaluateFinal; private final ClassName implementation; @@ -115,7 +114,6 @@ public class AggregatorImplementer { TypeName firstParamType = TypeName.get(e.getParameters().get(0).asType()); return firstParamType.isPrimitive() || firstParamType.toString().equals(stateType.toString()); }); - this.combineValueCount = findMethod(declarationType, "combineValueCount"); this.combineIntermediate = findMethod(declarationType, "combineIntermediate"); this.evaluateFinal = findMethod(declarationType, "evaluateFinal"); this.createParameters = init.getParameters() @@ -415,9 +413,6 @@ public class AggregatorImplementer { combineRawInput(builder, "vector"); } builder.endControlFlow(); - if (combineValueCount != null) { - builder.addStatement("$T.combineValueCount(state, vector.getPositionCount())", declarationType); - } return builder.build(); } @@ -459,9 +454,6 @@ public class AggregatorImplementer { } } builder.endControlFlow(); - if (combineValueCount != null) { - builder.addStatement("$T.combineValueCount(state, block.getTotalValueCount())", declarationType); - } return builder.build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/DoubleBucketedSort.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/DoubleBucketedSort.java index 63318a218990..ca89e6f99964 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/DoubleBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/DoubleBucketedSort.java @@ -10,6 +10,7 @@ package org.elasticsearch.compute.data.sort; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -101,7 +102,7 @@ public class DoubleBucketedSort implements Releasable { // Gathering mode long requiredSize = rootIndex + bucketSize; if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); assert 0 <= next && next < bucketSize @@ -257,19 +258,25 @@ public class DoubleBucketedSort implements Releasable { /** * Allocate storage for more buckets and store the "next gather offset" - * for those new buckets. + * for those new buckets. 
We always grow the storage by whole bucket's + * worth of slots at a time. We never allocate space for partial buckets. */ - private void grow(long minSize) { + private void grow(int bucket) { long oldMax = values.size(); - values = bigArrays.grow(values, minSize); + assert oldMax % bucketSize == 0; + + long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.DOUBLE_PAGE_SIZE, Double.BYTES); + // Round up to the next full bucket. + newSize = (newSize + bucketSize - 1) / bucketSize; + values = bigArrays.resize(values, newSize * bucketSize); // Set the next gather offsets for all newly allocated buckets. - setNextGatherOffsets(oldMax - (oldMax % getBucketSize())); + fillGatherOffsets(oldMax); } /** * Maintain the "next gather offsets" for newly allocated buckets. */ - private void setNextGatherOffsets(long startingAt) { + private void fillGatherOffsets(long startingAt) { int nextOffset = getBucketSize() - 1; for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) { setNextGatherOffset(bucketRoot, nextOffset); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java index b490fe193c33..2bf8edd99f48 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java @@ -10,6 +10,7 @@ package org.elasticsearch.compute.data.sort; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.FloatArray; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -101,7 +102,7 @@ public class FloatBucketedSort implements Releasable { // Gathering mode long requiredSize = rootIndex + bucketSize; if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); assert 0 <= next && next < bucketSize @@ -257,19 +258,25 @@ public class FloatBucketedSort implements Releasable { /** * Allocate storage for more buckets and store the "next gather offset" - * for those new buckets. + * for those new buckets. We always grow the storage by whole bucket's + * worth of slots at a time. We never allocate space for partial buckets. */ - private void grow(long minSize) { + private void grow(int bucket) { long oldMax = values.size(); - values = bigArrays.grow(values, minSize); + assert oldMax % bucketSize == 0; + + long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.FLOAT_PAGE_SIZE, Float.BYTES); + // Round up to the next full bucket. + newSize = (newSize + bucketSize - 1) / bucketSize; + values = bigArrays.resize(values, newSize * bucketSize); // Set the next gather offsets for all newly allocated buckets. - setNextGatherOffsets(oldMax - (oldMax % getBucketSize())); + fillGatherOffsets(oldMax); } /** * Maintain the "next gather offsets" for newly allocated buckets. 
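 * (Worked example for this write-up, not in the source: with bucketSize = 4,
 * a freshly allocated bucket stores 3 in its root slot, meaning "gather the
 * next value into offset 3"; collect() then walks that offset down 3, 2, 1, 0
 * as the first four values land in the bucket from back to front.)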
*/ - private void setNextGatherOffsets(long startingAt) { + private void fillGatherOffsets(long startingAt) { int nextOffset = getBucketSize() - 1; for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) { setNextGatherOffset(bucketRoot, nextOffset); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntBucketedSort.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntBucketedSort.java index 04a635d75fe5..257dfe2ebb0b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntBucketedSort.java @@ -10,6 +10,7 @@ package org.elasticsearch.compute.data.sort; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -101,7 +102,7 @@ public class IntBucketedSort implements Releasable { // Gathering mode long requiredSize = rootIndex + bucketSize; if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); assert 0 <= next && next < bucketSize @@ -257,19 +258,25 @@ public class IntBucketedSort implements Releasable { /** * Allocate storage for more buckets and store the "next gather offset" - * for those new buckets. + * for those new buckets. We always grow the storage by whole bucket's + * worth of slots at a time. We never allocate space for partial buckets. */ - private void grow(long minSize) { + private void grow(int bucket) { long oldMax = values.size(); - values = bigArrays.grow(values, minSize); + assert oldMax % bucketSize == 0; + + long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.INT_PAGE_SIZE, Integer.BYTES); + // Round up to the next full bucket. + newSize = (newSize + bucketSize - 1) / bucketSize; + values = bigArrays.resize(values, newSize * bucketSize); // Set the next gather offsets for all newly allocated buckets. - setNextGatherOffsets(oldMax - (oldMax % getBucketSize())); + fillGatherOffsets(oldMax); } /** * Maintain the "next gather offsets" for newly allocated buckets. 
*/ - private void setNextGatherOffsets(long startingAt) { + private void fillGatherOffsets(long startingAt) { int nextOffset = getBucketSize() - 1; for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) { setNextGatherOffset(bucketRoot, nextOffset); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongBucketedSort.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongBucketedSort.java index e08c25256944..c27467ebb60f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongBucketedSort.java @@ -10,6 +10,7 @@ package org.elasticsearch.compute.data.sort; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -101,7 +102,7 @@ public class LongBucketedSort implements Releasable { // Gathering mode long requiredSize = rootIndex + bucketSize; if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); assert 0 <= next && next < bucketSize @@ -257,19 +258,25 @@ public class LongBucketedSort implements Releasable { /** * Allocate storage for more buckets and store the "next gather offset" - * for those new buckets. + * for those new buckets. We always grow the storage by whole bucket's + * worth of slots at a time. We never allocate space for partial buckets. */ - private void grow(long minSize) { + private void grow(int bucket) { long oldMax = values.size(); - values = bigArrays.grow(values, minSize); + assert oldMax % bucketSize == 0; + + long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.LONG_PAGE_SIZE, Long.BYTES); + // Round up to the next full bucket. + newSize = (newSize + bucketSize - 1) / bucketSize; + values = bigArrays.resize(values, newSize * bucketSize); // Set the next gather offsets for all newly allocated buckets. - setNextGatherOffsets(oldMax - (oldMax % getBucketSize())); + fillGatherOffsets(oldMax); } /** * Maintain the "next gather offsets" for newly allocated buckets. 
*/ - private void setNextGatherOffsets(long startingAt) { + private void fillGatherOffsets(long startingAt) { int nextOffset = getBucketSize() - 1; for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) { setNextGatherOffset(bucketRoot, nextOffset); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java index 6dca94b9bc79..63d79a919862 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java @@ -8,10 +8,12 @@ package org.elasticsearch.compute.data.sort; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -29,6 +31,11 @@ import java.util.stream.LongStream; /** * Aggregates the top N variable length {@link BytesRef} values per bucket. * See {@link BucketedSort} for more information. + *
<p>
+ * This is substantially different from {@link IpBucketedSort} because + * this has to handle variable length byte strings. To do that it allocates + * a heap of {@link BreakingBytesRefBuilder}s. + *
</p>
*/ public class BytesRefBucketedSort implements Releasable { private final BucketedSortCommon common; @@ -123,7 +130,7 @@ public class BytesRefBucketedSort implements Releasable { // Gathering mode long requiredSize = common.endIndex(rootIndex); if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); common.assertValidNextOffset(next); @@ -271,13 +278,23 @@ public class BytesRefBucketedSort implements Releasable { /** * Allocate storage for more buckets and store the "next gather offset" - * for those new buckets. + * for those new buckets. We always grow the storage by whole bucket's + * worth of slots at a time. We never allocate space for partial buckets. */ - private void grow(long requiredSize) { + private void grow(int bucket) { long oldMax = values.size(); - values = common.bigArrays.grow(values, requiredSize); + assert oldMax % common.bucketSize == 0; + + long newSize = BigArrays.overSize( + ((long) bucket + 1) * common.bucketSize, + PageCacheRecycler.OBJECT_PAGE_SIZE, + RamUsageEstimator.NUM_BYTES_OBJECT_REF + ); + // Round up to the next full bucket. + newSize = (newSize + common.bucketSize - 1) / common.bucketSize; + values = common.bigArrays.resize(values, newSize * common.bucketSize); // Set the next gather offsets for all newly allocated buckets. - fillGatherOffsets(oldMax - (oldMax % common.bucketSize)); + fillGatherOffsets(oldMax); } /** @@ -296,6 +313,7 @@ public class BytesRefBucketedSort implements Releasable { bytes.grow(Integer.BYTES); bytes.setLength(Integer.BYTES); ByteUtils.writeIntLE(nextOffset, bytes.bytes(), 0); + checkInvariant(Math.toIntExact(bucketRoot / common.bucketSize)); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/IpBucketedSort.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/IpBucketedSort.java index 4eb31ea30db2..4392d3994886 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/IpBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/IpBucketedSort.java @@ -11,6 +11,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ByteArray; import org.elasticsearch.common.util.ByteUtils; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -26,6 +27,11 @@ import java.util.stream.IntStream; /** * Aggregates the top N IP values per bucket. * See {@link BucketedSort} for more information. + *
<p>
+ * This is substantially different from {@link BytesRefBucketedSort} because + * this takes advantage of IPs having a fixed length and allocates a dense + * storage for them. + *
</p>
*/ public class IpBucketedSort implements Releasable { private static final int IP_LENGTH = 16; // Bytes. It's ipv6. @@ -101,7 +107,7 @@ public class IpBucketedSort implements Releasable { // Gathering mode long requiredSize = common.endIndex(rootIndex) * IP_LENGTH; if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); common.assertValidNextOffset(next); @@ -268,17 +274,23 @@ public class IpBucketedSort implements Releasable { * Allocate storage for more buckets and store the "next gather offset" * for those new buckets. */ - private void grow(long minSize) { + private void grow(int bucket) { long oldMax = values.size() / IP_LENGTH; - values = common.bigArrays.grow(values, minSize); + assert oldMax % common.bucketSize == 0; + + int bucketBytes = common.bucketSize * IP_LENGTH; + long newSize = BigArrays.overSize(((long) bucket + 1) * bucketBytes, PageCacheRecycler.BYTE_PAGE_SIZE, 1); + // Round up to the next full bucket. + newSize = (newSize + bucketBytes - 1) / bucketBytes; + values = common.bigArrays.resize(values, newSize * bucketBytes); // Set the next gather offsets for all newly allocated buckets. - setNextGatherOffsets(oldMax - (oldMax % common.bucketSize)); + fillGatherOffsets(oldMax); } /** * Maintain the "next gather offsets" for newly allocated buckets. */ - private void setNextGatherOffsets(long startingAt) { + private void fillGatherOffsets(long startingAt) { int nextOffset = common.bucketSize - 1; for (long bucketRoot = startingAt; bucketRoot < values.size() / IP_LENGTH; bucketRoot += common.bucketSize) { setNextGatherOffset(bucketRoot, nextOffset); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/X-BucketedSort.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/X-BucketedSort.java.st index 6587743e34b6..095d48021e9c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/X-BucketedSort.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/X-BucketedSort.java.st @@ -10,6 +10,7 @@ package org.elasticsearch.compute.data.sort; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.$Type$Array; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -101,7 +102,7 @@ public class $Type$BucketedSort implements Releasable { // Gathering mode long requiredSize = rootIndex + bucketSize; if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); assert 0 <= next && next < bucketSize @@ -261,19 +262,25 @@ $endif$ /** * Allocate storage for more buckets and store the "next gather offset" - * for those new buckets. + * for those new buckets. We always grow the storage by whole bucket's + * worth of slots at a time. We never allocate space for partial buckets. */ - private void grow(long minSize) { + private void grow(int bucket) { long oldMax = values.size(); - values = bigArrays.grow(values, minSize); + assert oldMax % bucketSize == 0; + + long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.$TYPE$_PAGE_SIZE, $BYTES$); + // Round up to the next full bucket. 
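+ // (Worked example for this write-up, with made-up numbers: bucketSize = 3
+ // and bucket = 6 need at least (6 + 1) * 3 = 21 slots; if overSize() returns,
+ // say, 22, then (22 + 3 - 1) / 3 = 8 whole buckets, and the resize below
+ // lands on exactly 8 * 3 = 24 slots, never on a partial bucket.)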
+ newSize = (newSize + bucketSize - 1) / bucketSize; + values = bigArrays.resize(values, newSize * bucketSize); // Set the next gather offsets for all newly allocated buckets. - setNextGatherOffsets(oldMax - (oldMax % getBucketSize())); + fillGatherOffsets(oldMax); } /** * Maintain the "next gather offsets" for newly allocated buckets. */ - private void setNextGatherOffsets(long startingAt) { + private void fillGatherOffsets(long startingAt) { int nextOffset = getBucketSize() - 1; for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) { setNextGatherOffset(bucketRoot, nextOffset); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index 78572f55cd5e..c0d220fda5d4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -52,6 +52,13 @@ public class Driver implements Releasable, Describable { private final String sessionId; + /** + * Description of the task this driver is running. This description should be + * short and meaningful as a grouping identifier. We use the phase of the + * query right now: "data", "node_reduce", "final". + */ + private final String taskDescription; + /** * The wall clock time when this driver was created in milliseconds since epoch. * Compared to {@link #startNanos} this is less accurate and is measured by a @@ -96,6 +103,10 @@ public class Driver implements Releasable, Describable { /** * Creates a new driver with a chain of operators. * @param sessionId session Id + * @param taskDescription Description of the task this driver is running. This + * description should be short and meaningful as a grouping + * identifier. We use the phase of the query right now: + * "data", "node_reduce", "final". 
* @param driverContext the driver context * @param source source operator * @param intermediateOperators the chain of operators to execute @@ -105,6 +116,7 @@ public class Driver implements Releasable, Describable { */ public Driver( String sessionId, + String taskDescription, long startTime, long startNanos, DriverContext driverContext, @@ -116,6 +128,7 @@ public class Driver implements Releasable, Describable { Releasable releasable ) { this.sessionId = sessionId; + this.taskDescription = taskDescription; this.startTime = startTime; this.startNanos = startNanos; this.driverContext = driverContext; @@ -129,6 +142,7 @@ public class Driver implements Releasable, Describable { this.status = new AtomicReference<>( new DriverStatus( sessionId, + taskDescription, startTime, System.currentTimeMillis(), 0, @@ -150,6 +164,7 @@ public class Driver implements Releasable, Describable { * @param releasable a {@link Releasable} to invoked once the chain of operators has run to completion */ public Driver( + String taskDescription, DriverContext driverContext, SourceOperator source, List intermediateOperators, @@ -158,6 +173,7 @@ public class Driver implements Releasable, Describable { ) { this( "unset", + taskDescription, System.currentTimeMillis(), System.nanoTime(), driverContext, @@ -485,6 +501,7 @@ public class Driver implements Releasable, Describable { throw new IllegalStateException("can only get profile from finished driver"); } return new DriverProfile( + status.taskDescription(), status.started(), status.lastUpdated(), finishNanos - startNanos, @@ -531,6 +548,7 @@ public class Driver implements Releasable, Describable { return new DriverStatus( sessionId, + taskDescription, startTime, now, prev.cpuNanos() + extraCpuNanos, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java index 59ecdde23041..38fb298a7cff 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java @@ -27,6 +27,13 @@ import java.util.Objects; * Profile results from a single {@link Driver}. */ public class DriverProfile implements Writeable, ChunkedToXContentObject { + /** + * Description of the task this driver is running. This description should be + * short and meaningful as a grouping identifier. We use the phase of the + * query right now: "data", "node_reduce", "final". + */ + private final String taskDescription; + /** * Millis since epoch when the driver started. */ @@ -62,6 +69,7 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { private final DriverSleeps sleeps; public DriverProfile( + String taskDescription, long startMillis, long stopMillis, long tookNanos, @@ -70,6 +78,7 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { List operators, DriverSleeps sleeps ) { + this.taskDescription = taskDescription; this.startMillis = startMillis; this.stopMillis = stopMillis; this.tookNanos = tookNanos; @@ -80,6 +89,7 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { } public DriverProfile(StreamInput in) throws IOException { + this.taskDescription = in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION) ? 
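// (editorial note, not a patch line: streams from nodes older than
// ESQL_DRIVER_TASK_DESCRIPTION simply lack this field, so the reader
// falls back to "" instead of failing the deserialization)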
in.readString() : ""; if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.startMillis = in.readVLong(); this.stopMillis = in.readVLong(); @@ -102,6 +112,9 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { @Override public void writeTo(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION)) { + out.writeString(taskDescription); + } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVLong(startMillis); out.writeVLong(stopMillis); @@ -115,6 +128,13 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { sleeps.writeTo(out); } + /** + * Description of the task this driver is running. + */ + public String taskDescription() { + return taskDescription; + } + /** * Millis since epoch when the driver started. */ @@ -169,6 +189,7 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { @Override public Iterator toXContentChunked(ToXContent.Params params) { return Iterators.concat(ChunkedToXContentHelper.startObject(), Iterators.single((b, p) -> { + b.field("task_description", taskDescription); b.timestampFieldsFromUnixEpochMillis("start_millis", "start", startMillis); b.timestampFieldsFromUnixEpochMillis("stop_millis", "stop", stopMillis); b.field("took_nanos", tookNanos); @@ -197,7 +218,8 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { return false; } DriverProfile that = (DriverProfile) o; - return startMillis == that.startMillis + return taskDescription.equals(that.taskDescription) + && startMillis == that.startMillis && stopMillis == that.stopMillis && tookNanos == that.tookNanos && cpuNanos == that.cpuNanos @@ -208,7 +230,7 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { @Override public int hashCode() { - return Objects.hash(startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps); + return Objects.hash(taskDescription, startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java index 42e390823120..87537755bba3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java @@ -42,6 +42,11 @@ public class DriverStatus implements Task.Status { */ private final String sessionId; + /** + * Description of the task this driver is running. + */ + private final String taskDescription; + /** * Milliseconds since epoch when this driver started. */ @@ -83,6 +88,7 @@ public class DriverStatus implements Task.Status { DriverStatus( String sessionId, + String taskDescription, long started, long lastUpdated, long cpuTime, @@ -93,6 +99,7 @@ public class DriverStatus implements Task.Status { DriverSleeps sleeps ) { this.sessionId = sessionId; + this.taskDescription = taskDescription; this.started = started; this.lastUpdated = lastUpdated; this.cpuNanos = cpuTime; @@ -105,6 +112,7 @@ public class DriverStatus implements Task.Status { public DriverStatus(StreamInput in) throws IOException { this.sessionId = in.readString(); + this.taskDescription = in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION) ? 
in.readString() : ""; this.started = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readLong() : 0; this.lastUpdated = in.readLong(); this.cpuNanos = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readVLong() : 0; @@ -122,6 +130,9 @@ public class DriverStatus implements Task.Status { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(sessionId); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION)) { + out.writeString(taskDescription); + } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) { out.writeLong(started); } @@ -150,6 +161,15 @@ public class DriverStatus implements Task.Status { return sessionId; } + /** + * Description of the task this driver is running. This description should be + * short and meaningful as a grouping identifier. We use the phase of the + * query right now: "data", "node_reduce", "final". + */ + public String taskDescription() { + return taskDescription; + } + /** * When this {@link Driver} was started. */ @@ -211,7 +231,8 @@ public class DriverStatus implements Task.Status { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field("sessionId", sessionId); + builder.field("session_id", sessionId); + builder.field("task_description", taskDescription); builder.field("started", DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(started)); builder.field("last_updated", DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(lastUpdated)); builder.field("cpu_nanos", cpuNanos); @@ -240,6 +261,7 @@ public class DriverStatus implements Task.Status { if (o == null || getClass() != o.getClass()) return false; DriverStatus that = (DriverStatus) o; return sessionId.equals(that.sessionId) + && taskDescription.equals(that.taskDescription) && started == that.started && lastUpdated == that.lastUpdated && cpuNanos == that.cpuNanos @@ -252,7 +274,18 @@ public class DriverStatus implements Task.Status { @Override public int hashCode() { - return Objects.hash(sessionId, started, lastUpdated, cpuNanos, iterations, status, completedOperators, activeOperators, sleeps); + return Objects.hash( + sessionId, + taskDescription, + started, + lastUpdated, + cpuNanos, + iterations, + status, + completedOperators, + activeOperators, + sleeps + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index a5061b8cf6d3..41b319be6c5f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -123,7 +123,7 @@ public class OperatorTests extends MapperServiceTestCase { } }); DriverContext driverContext = driverContext(); - drivers.add(new Driver(driverContext, factory.get(driverContext), List.of(), docCollector, () -> {})); + drivers.add(new Driver("test", driverContext, factory.get(driverContext), List.of(), docCollector, () -> {})); } OperatorTestCase.runDriver(drivers); Set expectedDocIds = searchForDocIds(reader, query); @@ -215,6 +215,7 @@ public class OperatorTests extends MapperServiceTestCase { ) ); Driver driver = new Driver( + "test", driverContext, luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT).get(driverContext), operators, @@ -248,6 +249,7 @@ public class 
OperatorTests extends MapperServiceTestCase { DriverContext driverContext = driverContext(); try ( var driver = new Driver( + "test", driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), values, 100), List.of((new LimitOperator.Factory(limit)).get(driverContext)), @@ -335,6 +337,7 @@ public class OperatorTests extends MapperServiceTestCase { var actualPrimeOrds = new ArrayList<>(); try ( var driver = new Driver( + "test", driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), values, 100), List.of( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index 3eaf85c27e59..cea6b6a2a85a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -111,6 +111,7 @@ public abstract class AggregatorFunctionTestCase extends ForkingOperatorTestCase try ( Driver d = new Driver( + "test", driverContext, new NullInsertingSourceOperator(new CannedSourceOperator(input.iterator()), blockFactory), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java index 5bd9ecc931cf..67dcf4e78d13 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java @@ -65,6 +65,7 @@ public class CountDistinctIntAggregatorFunctionTests extends AggregatorFunctionT BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java index 70662efae688..b136d302ccfb 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java @@ -66,6 +66,7 @@ public class CountDistinctLongAggregatorFunctionTests extends AggregatorFunction BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java index 003dc415c619..4d94d4d2e029 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java @@ -53,6 +53,7 @@ public class SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(Double.MAX_VALUE - 1, 2)), List.of(simple().get(driverContext)), @@ -71,6 +72,7 @@ public class SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator( driverContext.blockFactory(), @@ -100,6 +102,7 @@ public class SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(values)), List.of(simple().get(driverContext)), @@ -122,6 +125,7 @@ public class SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(largeValues)), List.of(simple().get(driverContext)), @@ -141,6 +145,7 @@ public class SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(largeValues)), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java index 521c1e261cc6..c7a9fb75404f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java @@ -53,6 +53,7 @@ public class SumFloatAggregatorFunctionTests extends AggregatorFunctionTestCase List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(Float.MAX_VALUE - 1, 2f)), List.of(simple().get(driverContext)), @@ -71,6 +72,7 @@ public class SumFloatAggregatorFunctionTests extends AggregatorFunctionTestCase List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator( driverContext.blockFactory(), @@ -100,6 +102,7 @@ public class SumFloatAggregatorFunctionTests extends AggregatorFunctionTestCase driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(values)), List.of(simple().get(driverContext)), @@ -122,6 +125,7 @@ public class SumFloatAggregatorFunctionTests extends AggregatorFunctionTestCase driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(largeValues)), List.of(simple().get(driverContext)), @@ 
-141,6 +145,7 @@ public class SumFloatAggregatorFunctionTests extends AggregatorFunctionTestCase driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(largeValues)), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java index 8c5e4430128b..365b9cc75e01 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java @@ -52,6 +52,7 @@ public class SumIntAggregatorFunctionTests extends AggregatorFunctionTestCase { BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java index 00cdbedef54d..4821c72229d8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java @@ -51,6 +51,7 @@ public class SumLongAggregatorFunctionTests extends AggregatorFunctionTestCase { DriverContext driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), LongStream.of(Long.MAX_VALUE - 1, 2)), List.of(simple().get(driverContext)), @@ -68,6 +69,7 @@ public class SumLongAggregatorFunctionTests extends AggregatorFunctionTestCase { BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java index 914d29bb8ba2..d0a1fc1e2959 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java @@ -416,6 +416,7 @@ public class CategorizeBlockHashTests extends BlockHashTestCase { List intermediateOutput = new ArrayList<>(); Driver driver = new Driver( + "test", driverContext, new LocalSourceOperator(input1), List.of( @@ -436,6 +437,7 @@ public class CategorizeBlockHashTests extends BlockHashTestCase { runDriver(driver); driver = new Driver( + "test", driverContext, new LocalSourceOperator(input2), List.of( @@ -458,6 +460,7 @@ public class CategorizeBlockHashTests extends BlockHashTestCase { List finalOutput = new ArrayList<>(); driver = new 
Driver( + "test", driverContext, new CannedSourceOperator(intermediateOutput.iterator()), List.of( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java index 5f868f51f06e..17f41e27703f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java @@ -137,6 +137,7 @@ public class CategorizePackedValuesBlockHashTests extends BlockHashTestCase { List intermediateOutput = new ArrayList<>(); Driver driver = new Driver( + "test", driverContext, new LocalSourceOperator(input1), List.of( @@ -154,6 +155,7 @@ public class CategorizePackedValuesBlockHashTests extends BlockHashTestCase { runDriver(driver); driver = new Driver( + "test", driverContext, new LocalSourceOperator(input2), List.of( @@ -173,6 +175,7 @@ public class CategorizePackedValuesBlockHashTests extends BlockHashTestCase { List finalOutput = new ArrayList<>(); driver = new Driver( + "test", driverContext, new CannedSourceOperator(intermediateOutput.iterator()), List.of( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java index 78ed096c10b3..2358643dc089 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java @@ -409,6 +409,42 @@ public abstract class BucketedSortTestCase values = randomList(buckets, buckets, this::randomValue); + Collections.sort(values); + try (T sort = build(SortOrder.ASC, bucketSize)) { + // Add a single value to the main sort. + for (int b = 0; b < buckets; b++) { + collect(sort, values.get(b), b); + } + + try (T other = build(SortOrder.ASC, bucketSize)) { + // Add *all* values to the target bucket of the secondary sort. + for (int i = 0; i < values.size(); i++) { + if (i != target) { + collect(other, values.get(i), target); + } + } + + // Merge all buckets pairwise. Most of the secondary ones are empty. 
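+ // (clarifying note added for this write-up: merging an empty source bucket
+ // must leave the destination bucket untouched; only bucket `target` gains
+ // values, and it must keep just the first bucketSize in ascending order)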
+ for (int b = 0; b < buckets; b++) { + merge(sort, b, other, b); + } + } + + for (int b = 0; b < buckets; b++) { + if (b == target) { + assertBlock(sort, b, values.subList(0, bucketSize)); + } else { + assertBlock(sort, b, List.of(values.get(b))); + } + } + } + } + protected void assertBlock(T sort, int groupId, List values) { var blockFactory = TestBlockFactory.getNonBreakingInstance(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java index 1f5b5bf9b933..61c7582c7424 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java @@ -151,7 +151,7 @@ public class LuceneCountOperatorTests extends AnyOperatorTestCase { int taskConcurrency = between(1, 8); for (int i = 0; i < taskConcurrency; i++) { DriverContext ctx = contexts.get(); - drivers.add(new Driver(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); + drivers.add(new Driver("test", ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); } OperatorTestCase.runDriver(drivers); assertThat(results.size(), lessThanOrEqualTo(taskConcurrency)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java index b65da5aba758..f6fba20a2888 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java @@ -166,7 +166,7 @@ public abstract class LuceneMaxOperatorTestCase extends AnyOperatorTestCase { int taskConcurrency = between(1, 8); for (int i = 0; i < taskConcurrency; i++) { DriverContext ctx = contexts.get(); - drivers.add(new Driver(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); + drivers.add(new Driver("test", ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); } OperatorTestCase.runDriver(drivers); assertThat(results.size(), lessThanOrEqualTo(taskConcurrency)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java index f57bbd8c5ddb..3033efa50f37 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java @@ -166,7 +166,7 @@ public abstract class LuceneMinOperatorTestCase extends AnyOperatorTestCase { int taskConcurrency = between(1, 8); for (int i = 0; i < taskConcurrency; i++) { DriverContext ctx = contexts.get(); - drivers.add(new Driver(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); + drivers.add(new Driver("test", ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); } OperatorTestCase.runDriver(drivers); assertThat(results.size(), lessThanOrEqualTo(taskConcurrency)); diff --git 
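The Lucene count, max, and min tests above share one pattern: between(1, 8) drivers are built against separate DriverContexts but funnel their output pages into a single results list, and the test then asserts it saw at most one page per driver. Any sink shared across drivers this way has to be thread-safe. A minimal stand-in for that setup, using plain threads and a concurrent queue rather than the real Driver runner:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Random;
    import java.util.concurrent.ConcurrentLinkedQueue;

    class ConcurrentResultsDemo {
        public static void main(String[] args) throws InterruptedException {
            int taskConcurrency = 1 + new Random().nextInt(8); // mirrors between(1, 8)
            ConcurrentLinkedQueue<String> results = new ConcurrentLinkedQueue<>();
            List<Thread> drivers = new ArrayList<>();
            for (int i = 0; i < taskConcurrency; i++) {
                int id = i;
                // Each "driver" emits at most one result page; some emit none,
                // just like count/min/max drivers that saw no matching docs.
                drivers.add(new Thread(() -> {
                    if (id % 2 == 0) {
                        results.add("page-from-driver-" + id);
                    }
                }));
            }
            drivers.forEach(Thread::start);
            for (Thread t : drivers) {
                t.join();
            }
            // The tests assert the same bound: no more pages than drivers.
            assert results.size() <= taskConcurrency;
            System.out.println(results);
        }
    }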
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java index 54b33732aa42..4a628d596f14 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java @@ -210,6 +210,7 @@ public class LuceneQueryExpressionEvaluatorTests extends ComputeTestCase { operators.add(new EvalOperator(blockFactory, luceneQueryEvaluator)); List results = new ArrayList<>(); Driver driver = new Driver( + "test", driverContext, luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT, scoring).get(driverContext), operators, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java index b7114bb4e9b5..574f9b25ff14 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java @@ -160,7 +160,7 @@ public class LuceneSourceOperatorTests extends AnyOperatorTestCase { List results = new ArrayList<>(); OperatorTestCase.runDriver( - new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) + new Driver("test", ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) ); OperatorTestCase.assertDriverContext(ctx); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java index 20af40bcc684..3af21ba37d08 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java @@ -127,7 +127,7 @@ public class LuceneTopNSourceOperatorScoringTests extends LuceneTopNSourceOperat List results = new ArrayList<>(); OperatorTestCase.runDriver( - new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) + new Driver("test", ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) ); OperatorTestCase.assertDriverContext(ctx); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java index a6d652d499d8..92eaa78eedcd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java @@ -187,7 +187,7 @@ public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase { List results = new ArrayList<>(); OperatorTestCase.runDriver( - new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new 
TestResultPageSinkOperator(results::add), () -> {}) + new Driver("test", ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) ); OperatorTestCase.assertDriverContext(ctx); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java index feba401d445e..934fbcc0b897 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java @@ -174,6 +174,7 @@ public class TimeSeriesSortedSourceOperatorTests extends AnyOperatorTestCase { var metricField = new NumberFieldMapper.NumberFieldType("metric", NumberFieldMapper.NumberType.LONG); OperatorTestCase.runDriver( new Driver( + "test", driverContext, timeSeriesFactory.get(driverContext), List.of(ValuesSourceReaderOperatorTests.factory(reader, metricField, ElementType.LONG).get(driverContext)), @@ -248,6 +249,7 @@ public class TimeSeriesSortedSourceOperatorTests extends AnyOperatorTestCase { List results = new ArrayList<>(); OperatorTestCase.runDriver( new Driver( + "test", driverContext, timeSeriesFactory.get(driverContext), List.of(), @@ -306,6 +308,7 @@ public class TimeSeriesSortedSourceOperatorTests extends AnyOperatorTestCase { var hostnameField = new KeywordFieldMapper.KeywordFieldType("hostname"); OperatorTestCase.runDriver( new Driver( + "test", ctx, timeSeriesFactory.get(ctx), List.of( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java index 910541607d83..32164c7954dd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java @@ -1299,6 +1299,7 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase { var vsShardContext = new ValuesSourceReaderOperator.ShardContext(reader(indexKey), () -> SourceLoader.FROM_STORED_SOURCE); try ( Driver driver = new Driver( + "test", driverContext, luceneFactory.get(driverContext), List.of( @@ -1376,6 +1377,7 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase { int[] pages = new int[] { 0 }; try ( Driver d = new Driver( + "test", driverContext, simpleInput(driverContext, 10), List.of( @@ -1497,6 +1499,7 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase { boolean success = false; try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(input), operators, @@ -1524,6 +1527,7 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase { for (int i = 0; i < dummyDrivers; i++) { drivers.add( new Driver( + "test", "dummy-session", 0, 0, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index 2661ff665831..07a66a473f3b 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -1307,6 +1307,7 @@ public class ValuesSourceReaderOperatorTests extends OperatorTestCase { ); try ( Driver driver = new Driver( + "test", driverContext, luceneFactory.get(driverContext), List.of( @@ -1409,6 +1410,7 @@ public class ValuesSourceReaderOperatorTests extends OperatorTestCase { int[] pages = new int[] { 0 }; try ( Driver d = new Driver( + "test", driverContext, simpleInput(driverContext.blockFactory(), 10), List.of( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java index f017fed16cc9..e94864b9530b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java @@ -165,7 +165,14 @@ public class AsyncOperatorTests extends ESTestCase { } }); PlainActionFuture future = new PlainActionFuture<>(); - Driver driver = new Driver(driverContext, sourceOperator, intermediateOperators, outputOperator, () -> assertFalse(it.hasNext())); + Driver driver = new Driver( + "test", + driverContext, + sourceOperator, + intermediateOperators, + outputOperator, + () -> assertFalse(it.hasNext()) + ); Driver.start(threadPool.getThreadContext(), threadPool.executor(ESQL_TEST_EXECUTOR), driver, between(1, 10000), future); future.actionGet(); Releasables.close(localBreaker); @@ -295,7 +302,7 @@ public class AsyncOperatorTests extends ESTestCase { }; SinkOperator outputOperator = new PageConsumerOperator(Page::releaseBlocks); PlainActionFuture future = new PlainActionFuture<>(); - Driver driver = new Driver(driverContext, sourceOperator, List.of(asyncOperator), outputOperator, localBreaker); + Driver driver = new Driver("test", driverContext, sourceOperator, List.of(asyncOperator), outputOperator, localBreaker); Driver.start(threadPool.getThreadContext(), threadPool.executor(ESQL_TEST_EXECUTOR), driver, between(1, 1000), future); assertBusy(() -> assertTrue(future.isDone())); if (failed.get()) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java index 27083ea0fcd1..a39aa10af5f3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java @@ -27,6 +27,7 @@ import static org.hamcrest.Matchers.equalTo; public class DriverProfileTests extends AbstractWireSerializingTestCase { public void testToXContent() { DriverProfile status = new DriverProfile( + "test", 123413220000L, 123413243214L, 10012, @@ -44,6 +45,7 @@ public class DriverProfileTests extends AbstractWireSerializingTestCase startMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong); - case 1 -> stopMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong); - case 2 -> tookNanos = randomValueOtherThan(tookNanos, ESTestCase::randomNonNegativeLong); - case 3 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong); - case 4 -> 
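DriverProfileTests follows the standard wire-serialization mutate idiom: pick one field index at random, replace exactly that field with a different value, and rebuild, so equals and hashCode can be checked against a guaranteed-unequal copy. That is why adding taskDescription bumps the bound from between(0, 6) to between(0, 7): the bound must track the field count. Each case also has to randomize against the field it replaces, otherwise the mutated value can collide with the original and the test turns flaky. A generic sketch of the idiom, with an illustrative record rather than the Elasticsearch classes:

    import java.util.Random;

    record Profile(String taskDescription, long startMillis, long stopMillis) {
        static final Random RANDOM = new Random();

        /** Return a copy with exactly one randomly chosen field changed. */
        Profile mutate() {
            return switch (RANDOM.nextInt(3)) { // one case per record component
                case 0 -> new Profile(taskDescription + "-changed", startMillis, stopMillis);
                case 1 -> new Profile(taskDescription, startMillis + 1, stopMillis);
                case 2 -> new Profile(taskDescription, startMillis, stopMillis + 1);
                default -> throw new UnsupportedOperationException();
            };
        }

        public static void main(String[] args) {
            Profile p = new Profile("data", 1L, 2L);
            // The contract the harness relies on: a mutation is never equal (run with -ea).
            assert p.mutate().equals(p) == false;
            System.out.println(p.mutate());
        }
    }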
iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong); - case 5 -> operators = randomValueOtherThan(operators, DriverStatusTests::randomOperatorStatuses); - case 6 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps); + switch (between(0, 7)) { + case 0 -> taskDescription = randomValueOtherThan(taskDescription, DriverStatusTests::randomTaskDescription); + case 1 -> startMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong); + case 2 -> stopMillis = randomValueOtherThan(stopMillis, ESTestCase::randomNonNegativeLong); + case 3 -> tookNanos = randomValueOtherThan(tookNanos, ESTestCase::randomNonNegativeLong); + case 4 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong); + case 5 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong); + case 6 -> operators = randomValueOtherThan(operators, DriverStatusTests::randomOperatorStatuses); + case 7 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps); default -> throw new UnsupportedOperationException(); } - return new DriverProfile(startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps); + return new DriverProfile(taskDescription, startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java index b46d9f3f4add..83deb57a3ba7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java @@ -32,6 +32,7 @@ public class DriverStatusTests extends AbstractWireSerializingTestCase sessionId = randomValueOtherThan(sessionId, this::randomSessionId); - case 1 -> started = randomValueOtherThan(started, ESTestCase::randomNonNegativeLong); - case 2 -> lastUpdated = randomValueOtherThan(lastUpdated, ESTestCase::randomNonNegativeLong); - case 3 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong); - case 4 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong); - case 5 -> status = randomValueOtherThan(status, this::randomStatus); - case 6 -> completedOperators = randomValueOtherThan(completedOperators, DriverStatusTests::randomOperatorStatuses); - case 7 -> activeOperators = randomValueOtherThan(activeOperators, DriverStatusTests::randomOperatorStatuses); - case 8 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps); + case 1 -> taskDescription = randomValueOtherThan(taskDescription, DriverStatusTests::randomTaskDescription); + case 2 -> started = randomValueOtherThan(started, ESTestCase::randomNonNegativeLong); + case 3 -> lastUpdated = randomValueOtherThan(lastUpdated, ESTestCase::randomNonNegativeLong); + case 4 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong); + case 5 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong); + case 6 -> status = randomValueOtherThan(status, this::randomStatus); + case 7 -> completedOperators = randomValueOtherThan(completedOperators, DriverStatusTests::randomOperatorStatuses); + case 8 -> activeOperators = randomValueOtherThan(activeOperators, DriverStatusTests::randomOperatorStatuses); + case 9 -> sleeps =
randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps); default -> throw new UnsupportedOperationException(); } - return new DriverStatus(sessionId, started, lastUpdated, cpuNanos, iterations, status, completedOperators, activeOperators, sleeps); + return new DriverStatus( + sessionId, + taskDescription, + started, + lastUpdated, + cpuNanos, + iterations, + status, + completedOperators, + activeOperators, + sleeps + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java index e715b94bc55e..48a566994b2f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java @@ -67,6 +67,7 @@ public class DriverTests extends ESTestCase { Driver driver = new Driver( "unset", + "test", startEpoch, startNanos, driverContext, @@ -116,6 +117,7 @@ public class DriverTests extends ESTestCase { Driver driver = new Driver( "unset", + "test", startEpoch, startNanos, driverContext, @@ -166,6 +168,7 @@ public class DriverTests extends ESTestCase { Driver driver = new Driver( "unset", + "test", startEpoch, startNanos, driverContext, @@ -231,7 +234,7 @@ public class DriverTests extends ESTestCase { WarningsOperator warning1 = new WarningsOperator(threadPool); WarningsOperator warning2 = new WarningsOperator(threadPool); CyclicBarrier allPagesProcessed = new CyclicBarrier(2); - Driver driver = new Driver(driverContext, new CannedSourceOperator(inPages.iterator()) { + Driver driver = new Driver("test", driverContext, new CannedSourceOperator(inPages.iterator()) { @Override public Page getOutput() { assertRunningWithRegularUser(threadPool); @@ -315,7 +318,7 @@ public class DriverTests extends ESTestCase { } }); - Driver driver = new Driver(driverContext, sourceOperator, List.of(delayOperator), sinkOperator, () -> {}); + Driver driver = new Driver("test", driverContext, sourceOperator, List.of(delayOperator), sinkOperator, () -> {}); ThreadContext threadContext = threadPool.getThreadContext(); PlainActionFuture future = new PlainActionFuture<>(); @@ -336,7 +339,7 @@ public class DriverTests extends ESTestCase { var sinkHandler = new ExchangeSinkHandler(driverContext.blockFactory(), between(1, 5), System::currentTimeMillis); var sourceOperator = new ExchangeSourceOperator(sourceHandler.createExchangeSource()); var sinkOperator = new ExchangeSinkOperator(sinkHandler.createExchangeSink(() -> {}), Function.identity()); - Driver driver = new Driver(driverContext, sourceOperator, List.of(), sinkOperator, () -> {}); + Driver driver = new Driver("test", driverContext, sourceOperator, List.of(), sinkOperator, () -> {}); PlainActionFuture future = new PlainActionFuture<>(); Driver.start(threadPool.getThreadContext(), threadPool.executor("esql"), driver, between(1, 1000), future); assertBusy( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index 744121a3807c..6b036dea5f74 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -68,6 +68,7 @@ public abstract class 
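In DriverTests and ExchangeServiceTests the long-form Driver constructor is used, so the new task description slots in as the second argument after the session id, while the dozens of simpler call sites just pass "test" as the new first argument. The usual way to keep such call sites cheap is a short overload that delegates with defaults; a hypothetical sketch of that shape, not the real Driver:

    final class DriverSketch {
        private final String sessionId;
        private final String taskDescription;
        private final long startMillis;
        private final long startNanos;

        // Long form: everything explicit, as in DriverTests and ExchangeServiceTests.
        DriverSketch(String sessionId, String taskDescription, long startMillis, long startNanos) {
            this.sessionId = sessionId;
            this.taskDescription = taskDescription;
            this.startMillis = startMillis;
            this.startNanos = startNanos;
        }

        // Short form used by most tests: default session id and "now" timestamps.
        DriverSketch(String taskDescription) {
            this("unset", taskDescription, System.currentTimeMillis(), System.nanoTime());
        }

        @Override
        public String toString() {
            return "DriverSketch[" + sessionId + ", " + taskDescription + ", " + startMillis + ", " + startNanos + "]";
        }

        public static void main(String[] args) {
            System.out.println(new DriverSketch("test"));
        }
    }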
ForkingOperatorTestCase extends OperatorTestCase { List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(input.iterator()), List.of(simpleWithMode(AggregatorMode.INITIAL).get(driverContext), simpleWithMode(AggregatorMode.FINAL).get(driverContext)), @@ -89,6 +90,7 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(partials.iterator()), List.of(simpleWithMode(AggregatorMode.FINAL).get(driverContext)), @@ -110,6 +112,7 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(input.iterator()), List.of( @@ -142,6 +145,7 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(intermediates.iterator()), List.of(simpleWithMode(AggregatorMode.FINAL).get(driverContext)), @@ -240,6 +244,7 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { DriverContext driver1Context = driverContext(); drivers.add( new Driver( + "test", driver1Context, new CannedSourceOperator(pages.iterator()), List.of( @@ -257,6 +262,7 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { DriverContext driver2Context = driverContext(); drivers.add( new Driver( + "test", driver2Context, new ExchangeSourceOperator(sourceExchanger.createExchangeSource()), List.of( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java index afd4695db932..b960a12e6f90 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java @@ -303,6 +303,7 @@ public class TimeSeriesAggregationOperatorTests extends ComputeTestCase { List results = new ArrayList<>(); OperatorTestCase.runDriver( new Driver( + "test", ctx, sourceOperatorFactory.get(ctx), CollectionUtils.concatLists(intermediateOperators, List.of(intialAgg, intermediateAgg, finalAgg)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index d5c6d196a1b9..2edf156f92da 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -300,6 +300,7 @@ public class ExchangeServiceTests extends ESTestCase { DriverContext dc = driverContext(); Driver d = new Driver( "test-session:1", + "test", 0, 0, dc, @@ -318,6 +319,7 @@ public class ExchangeServiceTests extends ESTestCase { DriverContext dc = driverContext(); Driver d = new Driver( "test-session:2", + "test", 0, 0, dc, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 
e63e8b63d6ee..49d91df556d1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -543,6 +543,7 @@ public class TopNOperatorTests extends OperatorTestCase { List> actualTop = new ArrayList<>(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( @@ -633,6 +634,7 @@ public class TopNOperatorTests extends OperatorTestCase { List> actualTop = new ArrayList<>(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( @@ -668,6 +670,7 @@ public class TopNOperatorTests extends OperatorTestCase { List> outputValues = new ArrayList<>(); try ( Driver driver = new Driver( + "test", driverContext, new TupleBlockSourceOperator(driverContext.blockFactory(), inputValues, randomIntBetween(1, 1000)), List.of( @@ -938,6 +941,7 @@ public class TopNOperatorTests extends OperatorTestCase { int topCount = randomIntBetween(1, values.size()); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(page).iterator()), List.of( @@ -1112,6 +1116,7 @@ public class TopNOperatorTests extends OperatorTestCase { List> actual = new ArrayList<>(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( @@ -1239,6 +1244,7 @@ public class TopNOperatorTests extends OperatorTestCase { DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( @@ -1327,6 +1333,7 @@ public class TopNOperatorTests extends OperatorTestCase { DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( @@ -1367,6 +1374,7 @@ public class TopNOperatorTests extends OperatorTestCase { DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( + "test", driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), LongStream.range(0, docCount)), List.of( diff --git a/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java index a46dca4ae38c..d9fca11ecdcf 100644 --- a/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java @@ -190,6 +190,7 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase { List in = source.next(); try ( Driver d = new Driver( + "test", driverContext(), new CannedSourceOperator(in.iterator()), operators.get(), @@ -264,6 +265,7 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase { boolean success = false; try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(input), operators, @@ -291,6 +293,7 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase { for (int i = 0; i < dummyDrivers; i++) { drivers.add( new Driver( + "test", "dummy-session", 0, 0, diff --git 
a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 601ce819224b..58c82d800954 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -40,8 +40,10 @@ import java.util.Map; import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.any; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasItem; @@ -287,7 +289,6 @@ public class RestEsqlIT extends RestEsqlTestCase { equalTo(List.of(List.of(499.5d))) ); - List> signatures = new ArrayList<>(); @SuppressWarnings("unchecked") List> profiles = (List>) ((Map) result.get("profile")).get("drivers"); for (Map p : profiles) { @@ -299,26 +300,34 @@ public class RestEsqlIT extends RestEsqlTestCase { for (Map o : operators) { sig.add(checkOperatorProfile(o)); } - signatures.add(sig); + String taskDescription = p.get("task_description").toString(); + switch (taskDescription) { + case "data" -> assertMap( + sig, + matchesList().item("LuceneSourceOperator") + .item("ValuesSourceReaderOperator") + .item("AggregationOperator") + .item("ExchangeSinkOperator") + ); + case "node_reduce" -> assertThat( + sig, + either(matchesList().item("ExchangeSourceOperator").item("ExchangeSinkOperator")).or( + matchesList().item("ExchangeSourceOperator").item("AggregationOperator").item("ExchangeSinkOperator") + ) + ); + case "final" -> assertMap( + sig, + matchesList().item("ExchangeSourceOperator") + .item("AggregationOperator") + .item("ProjectOperator") + .item("LimitOperator") + .item("EvalOperator") + .item("ProjectOperator") + .item("OutputOperator") + ); + default -> throw new IllegalArgumentException("can't match " + taskDescription); + } } - var readProfile = matchesList().item("LuceneSourceOperator") - .item("ValuesSourceReaderOperator") - .item("AggregationOperator") - .item("ExchangeSinkOperator"); - var mergeProfile = matchesList().item("ExchangeSourceOperator") - .item("AggregationOperator") - .item("ProjectOperator") - .item("LimitOperator") - .item("EvalOperator") - .item("ProjectOperator") - .item("OutputOperator"); - var emptyReduction = matchesList().item("ExchangeSourceOperator").item("ExchangeSinkOperator"); - var reduction = matchesList().item("ExchangeSourceOperator").item("AggregationOperator").item("ExchangeSinkOperator"); - assertThat( - signatures, - Matchers.either(containsInAnyOrder(readProfile, reduction, mergeProfile)) - .or(containsInAnyOrder(readProfile, emptyReduction, mergeProfile)) - ); } public void testProfileOrdinalsGroupingOperator() throws IOException { @@ -391,6 +400,7 @@ public class RestEsqlIT extends RestEsqlTestCase { } signatures.add(sig); } + // TODO adapt this to use task_description once this is reenabled assertThat( signatures, containsInAnyOrder( @@ -491,10 +501,10 @@ public class RestEsqlIT extends RestEsqlTestCase { MapMatcher 
sleepMatcher = matchesMap().entry("reason", "exchange empty") .entry("sleep_millis", greaterThan(0L)) .entry("wake_millis", greaterThan(0L)); - if (operators.contains("LuceneSourceOperator")) { - assertMap(sleeps, matchesMap().entry("counts", Map.of()).entry("first", List.of()).entry("last", List.of())); - } else if (operators.contains("ExchangeSourceOperator")) { - if (operators.contains("ExchangeSinkOperator")) { + String taskDescription = p.get("task_description").toString(); + switch (taskDescription) { + case "data" -> assertMap(sleeps, matchesMap().entry("counts", Map.of()).entry("first", List.of()).entry("last", List.of())); + case "node_reduce" -> { assertMap(sleeps, matchesMap().entry("counts", matchesMap().entry("exchange empty", greaterThan(0))).extraOk()); @SuppressWarnings("unchecked") List> first = (List>) sleeps.get("first"); @@ -506,8 +516,8 @@ public class RestEsqlIT extends RestEsqlTestCase { for (Map s : last) { assertMap(s, sleepMatcher); } - - } else { + } + case "final" -> { assertMap( sleeps, matchesMap().entry("counts", matchesMap().entry("exchange empty", 1)) @@ -515,14 +525,14 @@ public class RestEsqlIT extends RestEsqlTestCase { .entry("last", List.of(sleepMatcher)) ); } - } else { - fail("unknown signature: " + operators); + default -> throw new IllegalArgumentException("unknown task: " + taskDescription); } } } private MapMatcher commonProfile() { - return matchesMap().entry("start_millis", greaterThan(0L)) + return matchesMap().entry("task_description", any(String.class)) + .entry("start_millis", greaterThan(0L)) .entry("stop_millis", greaterThan(0L)) .entry("iterations", greaterThan(0L)) .entry("cpu_nanos", greaterThan(0L)) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 85c03ce7860d..b15e4cfe739f 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -38,6 +38,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.hamcrest.Matcher; import org.junit.Before; import java.io.IOException; @@ -75,9 +76,6 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { private static final Logger LOGGER = LogManager.getLogger(EsqlActionTaskIT.class); - private String READ_DESCRIPTION; - private String MERGE_DESCRIPTION; - private String REDUCE_DESCRIPTION; private boolean nodeLevelReduction; /** @@ -89,21 +87,6 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { public void setup() { assumeTrue("requires query pragmas", canUseQueryPragmas()); nodeLevelReduction = randomBoolean(); - READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES] - \\_ValuesSourceReaderOperator[fields = [pause_me]] - \\_AggregationOperator[mode = INITIAL, aggs = sum of longs] - \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); - MERGE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_AggregationOperator[mode = FINAL, aggs = sum of longs] - \\_ProjectOperator[projection = [0]] - \\_LimitOperator[limit = 1000] - 
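Because every driver profile now reports task_description, the REST assertions above can dispatch on it directly instead of inferring the driver's role from which operator names appear in its signature, which is what the deleted operators.contains checks did. Reduced to plain collections, the dispatch looks like this; the profile maps are hand-built stand-ins for the parsed response:

    import java.util.List;
    import java.util.Map;

    class ProfileDispatchDemo {
        public static void main(String[] args) {
            List<Map<String, Object>> profiles = List.of(
                Map.of("task_description", "data", "operators", List.of("LuceneSourceOperator", "ExchangeSinkOperator")),
                Map.of("task_description", "node_reduce", "operators", List.of("ExchangeSourceOperator", "ExchangeSinkOperator")),
                Map.of("task_description", "final", "operators", List.of("ExchangeSourceOperator", "OutputOperator"))
            );
            for (Map<String, Object> p : profiles) {
                String taskDescription = p.get("task_description").toString();
                switch (taskDescription) {
                    case "data" -> System.out.println("assert data-node signature: " + p.get("operators"));
                    case "node_reduce" -> System.out.println("assert reduce signature: " + p.get("operators"));
                    case "final" -> System.out.println("assert coordinator signature: " + p.get("operators"));
                    default -> throw new IllegalArgumentException("can't match " + taskDescription);
                }
            }
        }
    }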
\\_OutputOperator[columns = [sum(pause_me)]]"""; - REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" - + (nodeLevelReduction ? "\\_AggregationOperator[mode = INTERMEDIATE, aggs = sum of longs]\n" : "") - + "\\_ExchangeSinkOperator"; - } public void testTaskContents() throws Exception { @@ -120,9 +103,11 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { for (TaskInfo task : foundTasks) { DriverStatus status = (DriverStatus) task.status(); assertThat(status.sessionId(), not(emptyOrNullString())); + String taskDescription = status.taskDescription(); for (DriverStatus.OperatorStatus o : status.activeOperators()) { logger.info("status {}", o); if (o.operator().startsWith("LuceneSourceOperator[maxPageSize = " + pageSize())) { + assertThat(taskDescription, equalTo("data")); LuceneSourceOperator.Status oStatus = (LuceneSourceOperator.Status) o.status(); assertThat(oStatus.processedSlices(), lessThanOrEqualTo(oStatus.totalSlices())); assertThat(oStatus.processedQueries(), equalTo(Set.of("*:*"))); @@ -142,6 +127,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { continue; } if (o.operator().equals("ValuesSourceReaderOperator[fields = [pause_me]]")) { + assertThat(taskDescription, equalTo("data")); ValuesSourceReaderOperator.Status oStatus = (ValuesSourceReaderOperator.Status) o.status(); assertMap( oStatus.readersBuilt(), @@ -152,6 +138,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { continue; } if (o.operator().equals("ExchangeSourceOperator")) { + assertThat(taskDescription, either(equalTo("node_reduce")).or(equalTo("final"))); ExchangeSourceOperator.Status oStatus = (ExchangeSourceOperator.Status) o.status(); assertThat(oStatus.pagesWaiting(), greaterThanOrEqualTo(0)); assertThat(oStatus.pagesEmitted(), greaterThanOrEqualTo(0)); @@ -159,6 +146,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { continue; } if (o.operator().equals("ExchangeSinkOperator")) { + assertThat(taskDescription, either(equalTo("data")).or(equalTo("node_reduce"))); ExchangeSinkOperator.Status oStatus = (ExchangeSinkOperator.Status) o.status(); assertThat(oStatus.pagesReceived(), greaterThanOrEqualTo(0)); exchangeSinks++; @@ -169,6 +157,29 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { assertThat(valuesSourceReaders, equalTo(1)); assertThat(exchangeSinks, greaterThanOrEqualTo(1)); assertThat(exchangeSources, equalTo(2)); + assertThat( + dataTasks(foundTasks).get(0).description(), + equalTo( + """ + \\_LuceneSourceOperator[sourceStatus] + \\_ValuesSourceReaderOperator[fields = [pause_me]] + \\_AggregationOperator[mode = INITIAL, aggs = sum of longs] + \\_ExchangeSinkOperator""".replace( + "sourceStatus", + "dataPartitioning = SHARD, maxPageSize = " + pageSize() + ", limit = 2147483647, scoreMode = COMPLETE_NO_SCORES" + ) + ) + ); + assertThat( + nodeReduceTasks(foundTasks).get(0).description(), + nodeLevelReduceDescriptionMatcher(foundTasks, "\\_AggregationOperator[mode = INTERMEDIATE, aggs = sum of longs]\n") + ); + assertThat(coordinatorTasks(foundTasks).get(0).description(), equalTo(""" + \\_ExchangeSourceOperator[] + \\_AggregationOperator[mode = FINAL, aggs = sum of longs] + \\_ProjectOperator[projection = [0]] + \\_LimitOperator[limit = 1000] + \\_OutputOperator[columns = [sum(pause_me)]]""")); } finally { scriptPermits.release(numberOfDocs()); try (EsqlQueryResponse esqlResponse = response.get()) { @@ -181,7 +192,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { 
ActionFuture response = startEsql(); try { List infos = getTasksStarting(); - TaskInfo running = infos.stream().filter(t -> t.description().equals(READ_DESCRIPTION)).findFirst().get(); + TaskInfo running = infos.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("data")).findFirst().get(); cancelTask(running.taskId()); assertCancelled(response); } finally { @@ -193,7 +204,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { ActionFuture response = startEsql(); try { List infos = getTasksStarting(); - TaskInfo running = infos.stream().filter(t -> t.description().equals(MERGE_DESCRIPTION)).findFirst().get(); + TaskInfo running = infos.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("final")).findFirst().get(); cancelTask(running.taskId()); assertCancelled(response); } finally { @@ -277,8 +288,8 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { for (TaskInfo task : tasks) { assertThat(task.action(), equalTo(DriverTaskRunner.ACTION_NAME)); DriverStatus status = (DriverStatus) task.status(); - logger.info("task {} {}", task.description(), status); - assertThat(task.description(), anyOf(equalTo(READ_DESCRIPTION), equalTo(MERGE_DESCRIPTION), equalTo(REDUCE_DESCRIPTION))); + logger.info("task {} {} {}", status.taskDescription(), task.description(), status); + assertThat(status.taskDescription(), anyOf(equalTo("data"), equalTo("node_reduce"), equalTo("final"))); /* * Accept tasks that are either starting or have gone * immediately async. The coordinating task is likely @@ -302,8 +313,8 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { for (TaskInfo task : tasks) { assertThat(task.action(), equalTo(DriverTaskRunner.ACTION_NAME)); DriverStatus status = (DriverStatus) task.status(); - assertThat(task.description(), anyOf(equalTo(READ_DESCRIPTION), equalTo(MERGE_DESCRIPTION), equalTo(REDUCE_DESCRIPTION))); - if (task.description().equals(READ_DESCRIPTION)) { + assertThat(status.taskDescription(), anyOf(equalTo("data"), equalTo("node_reduce"), equalTo("final"))); + if (status.taskDescription().equals("data")) { assertThat(status.status(), equalTo(DriverStatus.Status.RUNNING)); } else { assertThat(status.status(), equalTo(DriverStatus.Status.ASYNC)); @@ -328,23 +339,26 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { .get() .getTasks(); assertThat(tasks, hasSize(equalTo(3))); - List readTasks = tasks.stream().filter(t -> t.description().equals(READ_DESCRIPTION)).toList(); - List mergeTasks = tasks.stream().filter(t -> t.description().equals(MERGE_DESCRIPTION)).toList(); - assertThat(readTasks, hasSize(1)); - assertThat(mergeTasks, hasSize(1)); - // node-level reduction is disabled when the target data node is also the coordinator - if (readTasks.get(0).node().equals(mergeTasks.get(0).node())) { - REDUCE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_ExchangeSinkOperator"""; - } - List reduceTasks = tasks.stream().filter(t -> t.description().equals(REDUCE_DESCRIPTION)).toList(); - assertThat(reduceTasks, hasSize(1)); + assertThat(dataTasks(tasks), hasSize(1)); + assertThat(nodeReduceTasks(tasks), hasSize(1)); + assertThat(coordinatorTasks(tasks), hasSize(1)); foundTasks.addAll(tasks); }); return foundTasks; } + private List dataTasks(List tasks) { + return tasks.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("data")).toList(); + } + + private List nodeReduceTasks(List tasks) { + return tasks.stream().filter(t -> ((DriverStatus) 
t.status()).taskDescription().equals("node_reduce")).toList(); + } + + private List coordinatorTasks(List tasks) { + return tasks.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("final")).toList(); + } + private void assertCancelled(ActionFuture response) throws Exception { Exception e = expectThrows(Exception.class, response); Throwable cancelException = ExceptionsHelper.unwrap(e, TaskCancelledException.class); @@ -477,30 +491,41 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { } public void testTaskContentsForTopNQuery() throws Exception { - READ_DESCRIPTION = ("\\_LuceneTopNSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 1000, " - + "scoreMode = TOP_DOCS, " - + "sorts = [{\"pause_me\":{\"order\":\"asc\",\"missing\":\"_last\",\"unmapped_type\":\"long\"}}]]\n" - + "\\_ValuesSourceReaderOperator[fields = [pause_me]]\n" - + "\\_ProjectOperator[projection = [1]]\n" - + "\\_ExchangeSinkOperator").replace("pageSize()", Integer.toString(pageSize())); - MERGE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" - + "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " - + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" - + "\\_ProjectOperator[projection = [0]]\n" - + "\\_OutputOperator[columns = [pause_me]]"; - REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" - + (nodeLevelReduction - ? "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " - + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" - : "") - + "\\_ExchangeSinkOperator"; - ActionFuture response = startEsql("from test | sort pause_me | keep pause_me"); try { getTasksStarting(); logger.info("unblocking script"); scriptPermits.release(pageSize()); - getTasksRunning(); + List tasks = getTasksRunning(); + String sortStatus = """ + [{"pause_me":{"order":"asc","missing":"_last","unmapped_type":"long"}}]"""; + String sourceStatus = "dataPartitioning = SHARD, maxPageSize = " + + pageSize() + + ", limit = 1000, scoreMode = TOP_DOCS, sorts = " + + sortStatus; + assertThat(dataTasks(tasks).get(0).description(), equalTo(""" + \\_LuceneTopNSourceOperator[sourceStatus] + \\_ValuesSourceReaderOperator[fields = [pause_me]] + \\_ProjectOperator[projection = [1]] + \\_ExchangeSinkOperator""".replace("sourceStatus", sourceStatus))); + assertThat( + nodeReduceTasks(tasks).get(0).description(), + nodeLevelReduceDescriptionMatcher( + tasks, + "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " + + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" + ) + ); + assertThat( + coordinatorTasks(tasks).get(0).description(), + equalTo( + "\\_ExchangeSourceOperator[]\n" + + "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " + + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" + + "\\_ProjectOperator[projection = [0]]\n" + + "\\_OutputOperator[columns = [pause_me]]" + ) + ); } finally { // each scripted field "emit" is called by LuceneTopNSourceOperator and by ValuesSourceReaderOperator scriptPermits.release(2 * numberOfDocs()); @@ -512,26 +537,26 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { public void testTaskContentsForLimitQuery() throws Exception { String limit = Integer.toString(randomIntBetween(pageSize() + 1, 2 * numberOfDocs())); - READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = limit(), scoreMode = COMPLETE_NO_SCORES] - 
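The dataTasks, nodeReduceTasks, and coordinatorTasks helpers above differ only in the string they match against DriverStatus.taskDescription, so they could collapse into one parameterized filter. A sketch of that refactor, using stand-in records instead of TaskInfo and DriverStatus:

    import java.util.List;

    class TaskFilterDemo {
        // Stand-ins for TaskInfo and its DriverStatus payload.
        record Status(String taskDescription) {}
        record Task(String node, Status status) {}

        static List<Task> tasksWithDescription(List<Task> tasks, String taskDescription) {
            return tasks.stream().filter(t -> t.status().taskDescription().equals(taskDescription)).toList();
        }

        public static void main(String[] args) {
            List<Task> tasks = List.of(
                new Task("node-1", new Status("data")),
                new Task("node-2", new Status("node_reduce")),
                new Task("node-2", new Status("final"))
            );
            System.out.println(tasksWithDescription(tasks, "data"));          // the data-node driver
            System.out.println(tasksWithDescription(tasks, "final").size());  // 1 coordinator driver
        }
    }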
\\_ValuesSourceReaderOperator[fields = [pause_me]] - \\_ProjectOperator[projection = [1]] - \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())).replace("limit()", limit); - MERGE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_LimitOperator[limit = limit()] - \\_ProjectOperator[projection = [0]] - \\_OutputOperator[columns = [pause_me]]""".replace("limit()", limit); - REDUCE_DESCRIPTION = ("\\_ExchangeSourceOperator[]\n" - + (nodeLevelReduction ? "\\_LimitOperator[limit = limit()]\n" : "") - + "\\_ExchangeSinkOperator").replace("limit()", limit); - ActionFuture response = startEsql("from test | keep pause_me | limit " + limit); try { getTasksStarting(); logger.info("unblocking script"); scriptPermits.release(pageSize() - prereleasedDocs); - getTasksRunning(); + List tasks = getTasksRunning(); + assertThat(dataTasks(tasks).get(0).description(), equalTo(""" + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = limit(), scoreMode = COMPLETE_NO_SCORES] + \\_ValuesSourceReaderOperator[fields = [pause_me]] + \\_ProjectOperator[projection = [1]] + \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())).replace("limit()", limit))); + assertThat( + nodeReduceTasks(tasks).get(0).description(), + nodeLevelReduceDescriptionMatcher(tasks, "\\_LimitOperator[limit = " + limit + "]\n") + ); + assertThat(coordinatorTasks(tasks).get(0).description(), equalTo(""" + \\_ExchangeSourceOperator[] + \\_LimitOperator[limit = limit()] + \\_ProjectOperator[projection = [0]] + \\_OutputOperator[columns = [pause_me]]""".replace("limit()", limit))); } finally { scriptPermits.release(numberOfDocs()); try (EsqlQueryResponse esqlResponse = response.get()) { @@ -541,27 +566,35 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { } public void testTaskContentsForGroupingStatsQuery() throws Exception { - READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES] - \\_ValuesSourceReaderOperator[fields = [foo]] - \\_OrdinalsGroupingOperator(aggs = max of longs) - \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); - MERGE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_HashAggregationOperator[mode = , aggs = max of longs] - \\_ProjectOperator[projection = [1, 0]] - \\_LimitOperator[limit = 1000] - \\_OutputOperator[columns = [max(foo), pause_me]]"""; - REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" - + (nodeLevelReduction ? 
"\\_HashAggregationOperator[mode = , aggs = max of longs]\n" : "") - + "\\_ExchangeSinkOperator"; - ActionFuture response = startEsql("from test | stats max(foo) by pause_me"); try { getTasksStarting(); logger.info("unblocking script"); scriptPermits.release(pageSize()); - getTasksRunning(); + List tasks = getTasksRunning(); + String sourceStatus = "dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES" + .replace("pageSize()", Integer.toString(pageSize())); + assertThat( + dataTasks(tasks).get(0).description(), + equalTo( + """ + \\_LuceneSourceOperator[sourceStatus] + \\_ValuesSourceReaderOperator[fields = [foo]] + \\_OrdinalsGroupingOperator(aggs = max of longs) + \\_ExchangeSinkOperator""".replace("sourceStatus", sourceStatus) + + ) + ); + assertThat( + nodeReduceTasks(tasks).get(0).description(), + nodeLevelReduceDescriptionMatcher(tasks, "\\_HashAggregationOperator[mode = , aggs = max of longs]\n") + ); + assertThat(coordinatorTasks(tasks).get(0).description(), equalTo(""" + \\_ExchangeSourceOperator[] + \\_HashAggregationOperator[mode = , aggs = max of longs] + \\_ProjectOperator[projection = [1, 0]] + \\_LimitOperator[limit = 1000] + \\_OutputOperator[columns = [max(foo), pause_me]]""")); } finally { scriptPermits.release(numberOfDocs()); try (EsqlQueryResponse esqlResponse = response.get()) { @@ -572,6 +605,13 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { } } + private Matcher nodeLevelReduceDescriptionMatcher(List tasks, String nodeReduce) { + boolean matchNodeReduction = nodeLevelReduction + // If the data node and the coordinator are the same node then we don't reduce aggs in it. + && false == dataTasks(tasks).get(0).node().equals(coordinatorTasks(tasks).get(0).node()); + return equalTo("\\_ExchangeSourceOperator[]\n" + (matchNodeReduction ? 
nodeReduce : "") + "\\_ExchangeSinkOperator"); + } + @Override protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), MockTransportService.TestPlugin.class); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java index 15bbc06836de..1bbcc46c0555 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java @@ -227,6 +227,7 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { DriverContext driverContext = driverContext(); try ( var driver = new Driver( + "test", driverContext, source.get(driverContext), List.of(reader.get(driverContext), lookup.get(driverContext)), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java index 710fabb6a37c..1abb1ee92776 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -207,7 +206,7 @@ public abstract class AbstractLookupService> outListener) { ClusterState clusterState = clusterService.state(); - GroupShardsIterator shardIterators = clusterService.operationRouting() + List shardIterators = clusterService.operationRouting() .searchShards(clusterState.projectState(), new String[] { request.index }, Map.of(), "_local"); if (shardIterators.size() != 1) { outListener.onFailure(new EsqlIllegalArgumentException("target index {} has more than one shard", request.index)); @@ -327,6 +326,7 @@ public abstract class AbstractLookupService(), new Holder<>(DriverParallelism.SINGLE), @@ -190,7 +190,7 @@ public class LocalExecutionPlanner { final TimeValue statusInterval = configuration.pragmas().statusInterval(); context.addDriverFactory( new DriverFactory( - new DriverSupplier(context.bigArrays, context.blockFactory, physicalOperation, statusInterval, settings), + new DriverSupplier(taskDescription, context.bigArrays, context.blockFactory, physicalOperation, statusInterval, settings), context.driverParallelism().get() ) ); @@ -831,6 +831,7 @@ public class LocalExecutionPlanner { } record DriverSupplier( + String taskDescription, BigArrays bigArrays, BlockFactory blockFactory, PhysicalOperation physicalOperation, @@ -857,6 +858,7 @@ public class LocalExecutionPlanner { success = true; return new Driver( sessionId, + taskDescription, System.currentTimeMillis(), System.nanoTime(), driverContext, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java index 
5953be62e831..a2a5e5175c4e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java @@ -231,6 +231,7 @@ final class ClusterComputeHandler implements TransportRequestHandler searchContexts, Configuration configuration, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 55c093554d7a..c494c63d0fae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -155,6 +155,7 @@ public class ComputeService { } var computeContext = new ComputeContext( newChildSession(sessionId), + "single", LOCAL_CLUSTER, List.of(), configuration, @@ -226,6 +227,7 @@ public class ComputeService { rootTask, new ComputeContext( sessionId, + "final", LOCAL_CLUSTER, List.of(), configuration, @@ -394,7 +396,7 @@ public class ComputeService { // the planner will also set the driver parallelism in LocalExecutionPlanner.LocalExecutionPlan (used down below) // it's doing this in the planning of EsQueryExec (the source of the data) // see also EsPhysicalOperationProviders.sourcePhysicalOperation - LocalExecutionPlanner.LocalExecutionPlan localExecutionPlan = planner.plan(context.foldCtx(), plan); + LocalExecutionPlanner.LocalExecutionPlan localExecutionPlan = planner.plan(context.taskDescription(), context.foldCtx(), plan); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Local execution plan:\n{}", localExecutionPlan.describe()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java index 702093281942..40a87fca4dc2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java @@ -372,6 +372,7 @@ final class DataNodeComputeHandler implements TransportRequestHandler drivers = new ArrayList<>(); LocalExecutionPlan coordinatorNodeExecutionPlan = executionPlanner.plan( + "final", foldCtx, new OutputExec(coordinatorPlan, collectedPages::add) ); @@ -660,7 +661,7 @@ public class CsvTests extends ESTestCase { throw new AssertionError("expected no failure", e); }) ); - LocalExecutionPlan dataNodeExecutionPlan = executionPlanner.plan(foldCtx, csvDataNodePhysicalPlan); + LocalExecutionPlan dataNodeExecutionPlan = executionPlanner.plan("data", foldCtx, csvDataNodePhysicalPlan); drivers.addAll(dataNodeExecutionPlan.createDrivers(getTestName())); Randomness.shuffle(drivers); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java index ebfe1c814707..cc4e70632d67 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.action; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import 
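The task descriptions form a small closed vocabulary: this section shows ComputeService passing "single" and "final", CsvTests passing "final" and "data", and the integration tests asserting "data", "node_reduce", and "final". The real code passes plain strings; if the set were modelled explicitly it might look like the following enum, which is purely illustrative:

    enum TaskDescription {
        SINGLE("single"),           // coordinator executes the whole plan locally
        DATA("data"),               // data-node driver reading from Lucene
        NODE_REDUCE("node_reduce"), // per-node intermediate reduction
        FINAL("final");             // coordinator-side final merge

        private final String wireName;

        TaskDescription(String wireName) {
            this.wireName = wireName;
        }

        static TaskDescription fromWireName(String name) {
            for (TaskDescription d : values()) {
                if (d.wireName.equals(name)) {
                    return d;
                }
            }
            throw new IllegalArgumentException("unknown task: " + name);
        }

        public static void main(String[] args) {
            System.out.println(fromWireName("node_reduce")); // NODE_REDUCE
        }
    }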
org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.compute.data.BlockWritables; @@ -49,6 +51,7 @@ public class EsqlQueryResponseProfileTests extends AbstractWireSerializingTestCa private DriverProfile randomDriverProfile() { return new DriverProfile( + RandomStrings.randomAsciiLettersOfLength(random(), 5), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 4fdb4a7bf042..065495cbad93 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -723,6 +723,7 @@ public class EsqlQueryResponseTests extends AbstractChunkedSerializingTestCase> findFieldNamesInLookupJoinDescription(LocalExecutionPlanner.LocalExecutionPlan physicalOperations) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index e1e606a6e84b..7e5143d5a3ac 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -84,6 +84,7 @@ public class LocalExecutionPlannerTests extends MapperServiceTestCase { public void testLuceneSourceOperatorHugeRowSize() throws IOException { int estimatedRowSize = randomEstimatedRowSize(estimatedRowSizeIsHuge); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( + "test", FoldContext.small(), new EsQueryExec( Source.EMPTY, @@ -110,6 +111,7 @@ public class LocalExecutionPlannerTests extends MapperServiceTestCase { EsQueryExec.FieldSort sort = new EsQueryExec.FieldSort(sortField, Order.OrderDirection.ASC, Order.NullsPosition.LAST); Literal limit = new Literal(Source.EMPTY, 10, DataType.INTEGER); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( + "test", FoldContext.small(), new EsQueryExec( Source.EMPTY, @@ -136,6 +138,7 @@ public class LocalExecutionPlannerTests extends MapperServiceTestCase { EsQueryExec.GeoDistanceSort sort = new EsQueryExec.GeoDistanceSort(sortField, Order.OrderDirection.ASC, 1, -1); Literal limit = new Literal(Source.EMPTY, 10, DataType.INTEGER); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( + "test", FoldContext.small(), new EsQueryExec( Source.EMPTY, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java index 7db3216d1736..f4deaa45f1f8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.plugin; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.PlainActionFuture; @@ -62,6 +64,7 @@ public class ComputeListenerTests extends 
ESTestCase { for (int i = 0; i < numProfiles; i++) { profiles.add( new DriverProfile( + RandomStrings.randomAsciiLettersOfLength(random(), 5), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java index a22e179479de..903961794b33 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java @@ -44,10 +44,12 @@ import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.XContentFormattedException; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEvent; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventField; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventParser; @@ -80,6 +82,7 @@ public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { private static final String REQUEST_COUNT = "request_count"; private static final String WITH_ERROR = "with_error"; private static final String ERROR_ROUTE = "/_inference_error"; + private static final String FORMATTED_ERROR_ROUTE = "/_formatted_inference_error"; private static final String NO_STREAM_ROUTE = "/_inference_no_stream"; private static final Exception expectedException = new IllegalStateException("hello there"); private static final String expectedExceptionAsServerSentEvent = """ @@ -88,6 +91,11 @@ public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { "type":"illegal_state_exception","reason":"hello there"},"status":500\ }"""; + private static final Exception expectedFormattedException = new XContentFormattedException( + expectedException, + RestStatus.INTERNAL_SERVER_ERROR + ); + @Override protected boolean addMockHttpTransport() { return false; } @@ -145,6 +153,16 @@ public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { new ServerSentEventsRestActionListener(channel, threadPool).onFailure(expectedException); } + }, new RestHandler() { + @Override + public List<Route> routes() { + return List.of(new Route(RestRequest.Method.POST, FORMATTED_ERROR_ROUTE)); + } + + @Override + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { + new ServerSentEventsRestActionListener(channel, threadPool).onFailure(expectedFormattedException); + } }, new RestHandler() { @Override public List<Route> routes() { @@ -424,6 +442,21 @@ public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { assertThat(collector.stringsVerified.getLast(), equalTo(expectedExceptionAsServerSentEvent)); }
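The new FORMATTED_ERROR_ROUTE handler fails the stream with an XContentFormattedException so the listener can render the failure as a server-sent "error" event. A sketch of the framing that the testFormattedError() method just below asserts, reconstructed from the test's expected string (the helper name is illustrative, not part of the change):

    final class SseFrames {
        // The UTF-8 BOM (\uFEFF) appears once, at the start of the stream, so
        // it is part of the first frame here; each event is an "event:" line,
        // a "data:" line carrying the exception JSON, and a blank-line terminator.
        static String errorFrame(String errorJson) {
            return "\uFEFF" + "event: error\n" + "data: " + errorJson + "\n\n";
        }
    }

+ public void testFormattedError() throws 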
IOException { + var request = new Request(RestRequest.Method.POST.name(), FORMATTED_ERROR_ROUTE); + + try { + getRestClient().performRequest(request); + fail("Expected an exception to be thrown from the error route"); + } catch (ResponseException e) { + var response = e.getResponse(); + assertThat(response.getStatusLine().getStatusCode(), is(HttpStatus.SC_INTERNAL_SERVER_ERROR)); + assertThat(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), equalTo(""" + \uFEFFevent: error + data:\s""" + expectedExceptionAsServerSentEvent + "\n\n")); + } + } + public void testNoStream() { var collector = new RandomStringCollector(); var expectedTestCount = randomIntBetween(2, 30); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java index bcfeef9f4af9..4afafc5adf0c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java @@ -50,9 +50,11 @@ import org.elasticsearch.xpack.inference.telemetry.InferenceTimer; import java.io.IOException; import java.util.Random; import java.util.concurrent.Executor; +import java.util.concurrent.Flow; import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.InferencePlugin.INFERENCE_API_FEATURE; import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.modelAttributes; @@ -280,7 +282,9 @@ public abstract class BaseTransportInferenceAction streamErrorHandler(Flow.Processor upstream) { + return upstream; + } + private void recordMetrics(Model model, InferenceTimer timer, @Nullable Throwable t) { try { - inferenceStats.inferenceDuration().record(timer.elapsedMillis(), responseAttributes(model, t)); + inferenceStats.inferenceDuration().record(timer.elapsedMillis(), responseAttributes(model, unwrapCause(t))); } catch (Exception e) { log.atDebug().withThrowable(e).log("Failed to record metrics with a parsed model, dropping metrics"); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java index 2e3090f2afd5..1144a11d86cc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.InferenceServiceResults; @@ -20,14 +21,19 @@ import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; import 
org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; +import java.util.concurrent.Flow; + public class TransportUnifiedCompletionInferenceAction extends BaseTransportInferenceAction<UnifiedCompletionAction.Request> { @Inject @@ -86,4 +92,40 @@ public class TransportUnifiedCompletionInferenceAction extends BaseTransportInfe ) { service.unifiedCompletionInfer(model, request.getUnifiedCompletionRequest(), null, listener); } + + @Override + protected void doExecute(Task task, UnifiedCompletionAction.Request request, ActionListener<InferenceAction.Response> listener) { + super.doExecute(task, request, listener.delegateResponse((l, e) -> l.onFailure(UnifiedChatCompletionException.fromThrowable(e)))); + } + + /** + * If we get any errors, either in {@link #doExecute} via the listener.onFailure or while streaming, make sure that they are formatted + * as {@link UnifiedChatCompletionException}. + */ + @Override + protected Flow.Publisher<ChunkedToXContent> streamErrorHandler(Flow.Processor<ChunkedToXContent, ChunkedToXContent> upstream) { + return downstream -> { + upstream.subscribe(new Flow.Subscriber<>() { + @Override + public void onSubscribe(Flow.Subscription subscription) { + downstream.onSubscribe(subscription); + } + + @Override + public void onNext(ChunkedToXContent item) { + downstream.onNext(item); + } + + @Override + public void onError(Throwable throwable) { + downstream.onError(UnifiedChatCompletionException.fromThrowable(throwable)); + } + + @Override + public void onComplete() { + downstream.onComplete(); + } + }); + }; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceUnifiedChatCompletionResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceUnifiedChatCompletionResponseHandler.java index e1438dde76c9..db09317b7b79 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceUnifiedChatCompletionResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceUnifiedChatCompletionResponseHandler.java @@ -9,13 +9,16 @@ package org.elasticsearch.xpack.inference.external.elastic; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.core.inference.results.StreamingUnifiedChatCompletionResults; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.openai.OpenAiUnifiedStreamingProcessor; import org.elasticsearch.xpack.inference.external.request.Request;
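The streamErrorHandler override above forwards subscription, items, and completion untouched and rewrites only onError, so a mid-stream failure reaches the client as a UnifiedChatCompletionException. The same wrapping idea in plain JDK Flow types, as a standalone hedged sketch (no Elasticsearch classes involved):

    import java.util.concurrent.Flow;
    import java.util.function.Function;

    // Sketch: adapt a publisher so failures surface as a mapped exception.
    final class ErrorMappingPublisher {
        static <T> Flow.Publisher<T> mapErrors(Flow.Publisher<T> upstream, Function<Throwable, Throwable> mapper) {
            return downstream -> upstream.subscribe(new Flow.Subscriber<T>() {
                @Override public void onSubscribe(Flow.Subscription s) { downstream.onSubscribe(s); }
                @Override public void onNext(T item) { downstream.onNext(item); }
                @Override public void onError(Throwable t) { downstream.onError(mapper.apply(t)); }
                @Override public void onComplete() { downstream.onComplete(); }
            });
        }
    }

import 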
org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventParser; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventProcessor; +import java.util.Locale; import java.util.concurrent.Flow; public class ElasticInferenceServiceUnifiedChatCompletionResponseHandler extends ElasticInferenceServiceResponseHandler { @@ -32,4 +35,21 @@ public class ElasticInferenceServiceUnifiedChatCompletionResponseHandler extends serverSentEventProcessor.subscribe(openAiProcessor); return new StreamingUnifiedChatCompletionResults(openAiProcessor); } + + @Override + protected Exception buildError(String message, Request request, HttpResult result, ErrorResponse errorResponse) { + assert request.isStreaming() : "Only streaming requests support this format"; + var responseStatusCode = result.response().getStatusLine().getStatusCode(); + if (request.isStreaming()) { + var restStatus = toRestStatus(responseStatusCode); + return new UnifiedChatCompletionException( + restStatus, + errorMessage(message, request, result, errorResponse, responseStatusCode), + "error", + restStatus.name().toLowerCase(Locale.ROOT) + ); + } else { + return super.buildError(message, request, result, errorResponse); + } + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java index ed852e5177ac..cb5ed53fc558 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java @@ -107,33 +107,26 @@ public abstract class BaseResponseHandler implements ResponseHandler { protected Exception buildError(String message, Request request, HttpResult result, ErrorResponse errorResponse) { var responseStatusCode = result.response().getStatusLine().getStatusCode(); - - if (errorResponse == null - || errorResponse.errorStructureFound() == false - || Strings.isNullOrEmpty(errorResponse.getErrorMessage())) { - return new ElasticsearchStatusException( - format( - "%s for request from inference entity id [%s] status [%s]", - message, - request.getInferenceEntityId(), - responseStatusCode - ), - toRestStatus(responseStatusCode) - ); - } - return new ElasticsearchStatusException( - format( - "%s for request from inference entity id [%s] status [%s]. Error message: [%s]", - message, - request.getInferenceEntityId(), - responseStatusCode, - errorResponse.getErrorMessage() - ), + errorMessage(message, request, result, errorResponse, responseStatusCode), toRestStatus(responseStatusCode) ); } + protected String errorMessage(String message, Request request, HttpResult result, ErrorResponse errorResponse, int statusCode) { + return (errorResponse == null + || errorResponse.errorStructureFound() == false + || Strings.isNullOrEmpty(errorResponse.getErrorMessage())) + ? format("%s for request from inference entity id [%s] status [%s]", message, request.getInferenceEntityId(), statusCode) + : format( + "%s for request from inference entity id [%s] status [%s]. 
Error message: [%s]", + message, + request.getInferenceEntityId(), + statusCode, + errorResponse.getErrorMessage() + ); + } + public static RestStatus toRestStatus(int statusCode) { RestStatus code = null; if (statusCode < 500) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java index 7607e5e4ed3a..99f2a7c31e7d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java @@ -8,15 +8,26 @@ package org.elasticsearch.xpack.inference.external.openai; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; import org.elasticsearch.xpack.inference.external.request.Request; +import java.util.function.Function; + public class OpenAiChatCompletionResponseHandler extends OpenAiResponseHandler { public OpenAiChatCompletionResponseHandler(String requestType, ResponseParser parseFunction) { super(requestType, parseFunction, true); } + protected OpenAiChatCompletionResponseHandler( + String requestType, + ResponseParser parseFunction, + Function errorParseFunction + ) { + super(requestType, parseFunction, errorParseFunction, true); + } + @Override protected RetryException buildExceptionHandling429(Request request, HttpResult result) { // We don't retry, if the chat completion input is too large diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java index e0bc341fc679..8698955868a7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionRes import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; import org.elasticsearch.xpack.inference.external.http.retry.ContentTooLargeException; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; import org.elasticsearch.xpack.inference.external.request.Request; @@ -21,6 +22,7 @@ import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentE import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventProcessor; import java.util.concurrent.Flow; +import java.util.function.Function; import static org.elasticsearch.xpack.inference.external.http.retry.ResponseHandlerUtils.getFirstHeaderOrUnknown; @@ -42,7 +44,16 @@ public class OpenAiResponseHandler extends BaseResponseHandler { static final String OPENAI_SERVER_BUSY = "Received 
a server busy error status code"; public OpenAiResponseHandler(String requestType, ResponseParser parseFunction, boolean canHandleStreamingResponses) { - super(requestType, parseFunction, ErrorMessageResponseEntity::fromResponse, canHandleStreamingResponses); + this(requestType, parseFunction, ErrorMessageResponseEntity::fromResponse, canHandleStreamingResponses); + } + + protected OpenAiResponseHandler( + String requestType, + ResponseParser parseFunction, + Function errorParseFunction, + boolean canHandleStreamingResponses + ) { + super(requestType, parseFunction, errorParseFunction, canHandleStreamingResponses); } /** diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandler.java index fce2556efc5e..2901b449f8a6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandler.java @@ -7,19 +7,31 @@ package org.elasticsearch.xpack.inference.external.openai; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.results.StreamingUnifiedChatCompletionResults; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventParser; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventProcessor; +import java.util.Locale; +import java.util.Objects; +import java.util.Optional; import java.util.concurrent.Flow; public class OpenAiUnifiedChatCompletionResponseHandler extends OpenAiChatCompletionResponseHandler { public OpenAiUnifiedChatCompletionResponseHandler(String requestType, ResponseParser parseFunction) { - super(requestType, parseFunction); + super(requestType, parseFunction, OpenAiErrorResponse::fromResponse); } @Override @@ -31,4 +43,92 @@ public class OpenAiUnifiedChatCompletionResponseHandler extends OpenAiChatComple serverSentEventProcessor.subscribe(openAiProcessor); return new StreamingUnifiedChatCompletionResults(openAiProcessor); } + + @Override + protected Exception buildError(String message, Request request, HttpResult result, ErrorResponse errorResponse) { + assert request.isStreaming() : "Only streaming requests support this format"; + var responseStatusCode = result.response().getStatusLine().getStatusCode(); + if (request.isStreaming()) { + var errorMessage = errorMessage(message, request, result, errorResponse, responseStatusCode); + var restStatus = toRestStatus(responseStatusCode); + return errorResponse 
instanceof OpenAiErrorResponse oer + ? new UnifiedChatCompletionException(restStatus, errorMessage, oer.type(), oer.code(), oer.param()) + : new UnifiedChatCompletionException( + restStatus, + errorMessage, + errorResponse != null ? errorResponse.getClass().getSimpleName() : "unknown", + restStatus.name().toLowerCase(Locale.ROOT) + ); + } else { + return super.buildError(message, request, result, errorResponse); + } + } + + private static class OpenAiErrorResponse extends ErrorResponse { + private static final ConstructingObjectParser<Optional<OpenAiErrorResponse>, Void> ERROR_PARSER = new ConstructingObjectParser<>( + "open_ai_error", + true, + args -> Optional.ofNullable((OpenAiErrorResponse) args[0]) + ); + private static final ConstructingObjectParser<OpenAiErrorResponse, Void> ERROR_BODY_PARSER = new ConstructingObjectParser<>( + "open_ai_error", + true, + args -> new OpenAiErrorResponse((String) args[0], (String) args[1], (String) args[2], (String) args[3]) + ); + + static { + ERROR_BODY_PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("message")); + ERROR_BODY_PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("code")); + ERROR_BODY_PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("param")); + ERROR_BODY_PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("type")); + + ERROR_PARSER.declareObjectOrNull( + ConstructingObjectParser.optionalConstructorArg(), + ERROR_BODY_PARSER, + null, + new ParseField("error") + ); + } + + private static ErrorResponse fromResponse(HttpResult response) { + try ( + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(XContentParserConfiguration.EMPTY, response.body()) + ) { + return ERROR_PARSER.apply(parser, null).orElse(ErrorResponse.UNDEFINED_ERROR); + } catch (Exception e) { + // swallow the error + } + + return ErrorResponse.UNDEFINED_ERROR; + } + + @Nullable + private final String code; + @Nullable + private final String param; + private final String type; + + OpenAiErrorResponse(String errorMessage, @Nullable String code, @Nullable String param, String type) { + super(errorMessage); + this.code = code; + this.param = param; + this.type = Objects.requireNonNull(type); + } + + @Nullable + public String code() { + return code; + } + + @Nullable + public String param() { + return param; + } + + public String type() { + return type; + } + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java index 51f1bc48c830..0efd31a6832c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import java.io.IOException; import java.util.List; @@ -57,7 +58,10 @@ public class RestUnifiedCompletionInferenceAction extends BaseRestHandler { return channel -> client.execute( UnifiedCompletionAction.INSTANCE, request, - new 
ServerSentEventsRestActionListener(channel, threadPool) + new ServerSentEventsRestActionListener(channel, threadPool).delegateResponse((l, e) -> { + // format any validation exceptions from the rest -> transport path as UnifiedChatCompletionException + l.onFailure(UnifiedChatCompletionException.fromThrowable(e)); + }) ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java index 6991e1325f3b..cadf3e5f1806 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java @@ -35,15 +35,19 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.XContentFormattedException; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.util.Iterator; +import java.util.Map; import java.util.Objects; import java.util.concurrent.Flow; import java.util.concurrent.atomic.AtomicBoolean; +import static org.elasticsearch.xpack.core.inference.results.XContentFormattedException.X_CONTENT_PARAM; + /** * A version of {@link org.elasticsearch.rest.action.RestChunkedToXContentListener} that reads from a {@link Flow.Publisher} and encodes * the response in Server-Sent Events. @@ -72,7 +76,7 @@ public class ServerSentEventsRestActionListener implements ActionListener threadPool) { this.channel = channel; - this.params = params; + this.params = new ToXContent.DelegatingMapParams(Map.of(X_CONTENT_PARAM, String.valueOf(channel.detailedErrorsEnabled())), params); this.threadPool = Objects.requireNonNull(threadPool); } @@ -150,6 +154,12 @@ public class ServerSentEventsRestActionListener implements ActionListener Iterators.concat( ChunkedToXContentHelper.startObject(), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 9a150a5e1377..c7f19adb269a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -15,6 +15,7 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.LazyInitializable; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; @@ -33,6 +34,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnifiedCompletionRequest; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.XPackSettings; import 
org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; @@ -109,8 +111,11 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi private static final Logger logger = LogManager.getLogger(ElasticsearchInternalService.class); private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(ElasticsearchInternalService.class); + private final Settings settings; + public ElasticsearchInternalService(InferenceServiceExtension.InferenceServiceFactoryContext context) { super(context); + this.settings = context.settings(); } // for testing @@ -119,6 +124,7 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi Consumer> platformArch ) { super(context, platformArch); + this.settings = context.settings(); } @Override @@ -837,12 +843,17 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi @Override public void updateModelsWithDynamicFields(List<Model> models, ActionListener<List<Model>> listener) { - if (models.isEmpty()) { listener.onResponse(models); return; } + // if ML is disabled, do not update Deployment Stats (there won't be changes) + if (XPackSettings.MACHINE_LEARNING_ENABLED.get(settings) == false) { + listener.onResponse(models); + return; + } + var modelsByDeploymentIds = new HashMap(); for (var model : models) { assert model instanceof ElasticsearchInternalModel; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java index 562c99c0887b..56966ca40c47 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.inference.action; -import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.node.NodeClient; @@ -47,9 +47,9 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.assertArg; -import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -127,8 +127,7 @@ public abstract class BaseTransportInferenceActionTestCase { assertThat(attributes.get("service"), nullValue()); @@ -148,7 +147,13 @@ public abstract class BaseTransportInferenceActionTestCase listener = mock(); + ActionListener<InferenceAction.Response> listener = spy(new ActionListener<>() { + @Override + public void onResponse(InferenceAction.Response o) {} + + @Override + public void onFailure(Exception e) {} + }); action.doExecute(mock(), request, listener); return listener; } @@ -161,9 +166,9 @@ public abstract class BaseTransportInferenceActionTestCase { - assertThat(e, isA(ElasticsearchStatusException.class)); + 
assertThat(e, isA(ElasticsearchException.class)); assertThat(e.getMessage(), is("Unknown service [" + serviceId + "] for model [" + inferenceId + "]. ")); - assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + assertThat(((ElasticsearchException) e).status(), is(RestStatus.BAD_REQUEST)); })); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); @@ -192,7 +197,7 @@ public abstract class BaseTransportInferenceActionTestCase { - assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat(e, isA(ElasticsearchException.class)); assertThat( e.getMessage(), is( @@ -203,7 +208,7 @@ public abstract class BaseTransportInferenceActionTestCase { assertThat(attributes.get("service"), is(serviceId)); @@ -221,7 +226,6 @@ public abstract class BaseTransportInferenceActionTestCase { assertThat(attributes.get("service"), is(serviceId)); assertThat(attributes.get("task_type"), is(taskType.toString())); @@ -239,8 +243,8 @@ public abstract class BaseTransportInferenceActionTestCase { - assertThat(e, isA(ElasticsearchStatusException.class)); - var ese = (ElasticsearchStatusException) e; + assertThat(e, isA(ElasticsearchException.class)); + var ese = (ElasticsearchException) e; assertThat(ese.getMessage(), is("Streaming is not allowed for service [" + serviceId + "].")); assertThat(ese.status(), is(expectedStatus)); })); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java index e6b5c6d33613..7dac6a1015aa 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.action; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.inference.InferenceServiceRegistry; @@ -17,6 +16,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; @@ -81,12 +81,12 @@ public class TransportUnifiedCompletionActionTests extends BaseTransportInferenc var listener = doExecute(requestTaskType); verify(listener).onFailure(assertArg(e -> { - assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat(e, isA(UnifiedChatCompletionException.class)); assertThat( e.getMessage(), is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [chat_completion]") ); - assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + assertThat(((UnifiedChatCompletionException) e).status(), is(RestStatus.BAD_REQUEST)); })); 
verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); @@ -106,12 +106,12 @@ public class TransportUnifiedCompletionActionTests extends BaseTransportInferenc var listener = doExecute(requestTaskType); verify(listener).onFailure(assertArg(e -> { - assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat(e, isA(UnifiedChatCompletionException.class)); assertThat( e.getMessage(), is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [chat_completion]") ); - assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + assertThat(((UnifiedChatCompletionException) e).status(), is(RestStatus.BAD_REQUEST)); })); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandlerTests.java new file mode 100644 index 000000000000..4853aa8d2c56 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandlerTests.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.openai; + +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.elasticsearch.ExceptionsHelper.unwrapCause; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isA; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class OpenAiUnifiedChatCompletionResponseHandlerTests extends ESTestCase { + private final OpenAiUnifiedChatCompletionResponseHandler responseHandler = new OpenAiUnifiedChatCompletionResponseHandler( + "chat completions", + (a, b) -> mock() + ); + + public void testFailValidationWithAllFields() throws IOException { + var responseJson = """ + { + "error": { + "type": "not_found_error", + "message": "a message", + "code": "ahh", + "param": "model" + } + } + """; + + var errorJson = invalidResponseJson(responseJson); + + assertThat(errorJson, is(""" + {"error":{"code":"ahh","message":"Received a server error status code for request from inference entity id [abc] status [500]. 
\ + Error message: [a message]","param":"model","type":"not_found_error"}}""")); + } + + public void testFailValidationWithoutOptionalFields() throws IOException { + var responseJson = """ + { + "error": { + "type": "not_found_error", + "message": "a message" + } + } + """; + + var errorJson = invalidResponseJson(responseJson); + + assertThat(errorJson, is(""" + {"error":{"message":"Received a server error status code for request from inference entity id [abc] status [500]. \ + Error message: [a message]","type":"not_found_error"}}""")); + } + + public void testFailValidationWithInvalidJson() throws IOException { + var responseJson = """ + what? this isn't a json + """; + + var errorJson = invalidResponseJson(responseJson); + + assertThat(errorJson, is(""" + {"error":{"code":"bad_request","message":"Received a server error status code for request from inference entity id [abc] status\ + [500]","type":"ErrorResponse"}}""")); + } + + private String invalidResponseJson(String responseJson) throws IOException { + var exception = invalidResponse(responseJson); + assertThat(exception, isA(RetryException.class)); + assertThat(unwrapCause(exception), isA(UnifiedChatCompletionException.class)); + return toJson((UnifiedChatCompletionException) unwrapCause(exception)); + } + + private Exception invalidResponse(String responseJson) { + return expectThrows( + RetryException.class, + () -> responseHandler.validateResponse( + mock(), + mock(), + mockRequest(), + new HttpResult(mock500Response(), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + } + + private static Request mockRequest() { + var request = mock(Request.class); + when(request.getInferenceEntityId()).thenReturn("abc"); + when(request.isStreaming()).thenReturn(true); + return request; + } + + private static HttpResponse mock500Response() { + int statusCode = 500; + var statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(statusCode); + + var response = mock(HttpResponse.class); + when(response.getStatusLine()).thenReturn(statusLine); + + return response; + } + + private String toJson(UnifiedChatCompletionException e) throws IOException { + try (var builder = XContentFactory.jsonBuilder()) { + e.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xContent -> { + try { + xContent.toXContent(builder, EMPTY_PARAMS); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + }); + return XContentHelper.convertToJson(BytesReference.bytes(builder), false, builder.contentType()); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java index 7cfd231be39f..637ae726572a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java @@ -11,6 +11,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.XContentFactory; import org.hamcrest.MatcherAssert; @@ -85,6 +86,16 @@ public record InferenceEventsAssertion(Iterator events, Throwable 
error, return this; } + public InferenceEventsAssertion hasErrorMatching(CheckedConsumer matcher) { + hasError(); + try { + matcher.accept(error); + } catch (Exception e) { + fail(e); + } + return this; + } + public InferenceEventsAssertion hasEvents(String... events) { Arrays.stream(events).forEach(this::hasEvent); return this; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index fdf8520b939f..5d66486731f5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -27,14 +27,17 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnifiedCompletionRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.core.ml.search.WeightedToken; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -44,11 +47,15 @@ import org.elasticsearch.xpack.inference.external.response.elastic.ElasticInfere import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; +import org.elasticsearch.xpack.inference.services.InferenceEventsAssertion; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorization; import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationHandler; import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationTests; +import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionModel; +import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.junit.After; @@ -61,8 +68,10 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.getModelListenerForException; import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; @@ -76,6 +85,7 @@ import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.isA; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -951,6 +961,62 @@ public class ElasticInferenceServiceTests extends ESTestCase { } } + public void testUnifiedCompletionError() throws Exception { + var eisGatewayUrl = getUrl(webServer); + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = createService(senderFactory, eisGatewayUrl)) { + var responseJson = """ + { + "error": "The model `rainbow-sprinkles` does not exist or you do not have access to it." + }"""; + webServer.enqueue(new MockResponse().setResponseCode(404).setBody(responseJson)); + var model = new ElasticInferenceServiceCompletionModel( + "id", + TaskType.COMPLETION, + "elastic", + new ElasticInferenceServiceCompletionServiceSettings("model_id", new RateLimitSettings(100)), + EmptyTaskSettings.INSTANCE, + EmptySecretSettings.INSTANCE, + new ElasticInferenceServiceComponents(eisGatewayUrl) + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.unifiedCompletionInfer( + model, + UnifiedCompletionRequest.of( + List.of(new UnifiedCompletionRequest.Message(new UnifiedCompletionRequest.ContentString("hello"), "user", null, null)) + ), + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var result = listener.actionGet(TIMEOUT); + + InferenceEventsAssertion.assertThat(result).hasFinishedStream().hasNoEvents().hasErrorMatching(e -> { + e = unwrapCause(e); + assertThat(e, isA(UnifiedChatCompletionException.class)); + try (var builder = XContentFactory.jsonBuilder()) { + ((UnifiedChatCompletionException) e).toXContentChunked(EMPTY_PARAMS).forEachRemaining(xContent -> { + try { + xContent.toXContent(builder, EMPTY_PARAMS); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + }); + var json = XContentHelper.convertToJson(BytesReference.bytes(builder), false, builder.contentType()); + + assertThat(json, is(""" + {\ + "error":{\ + "code":"not_found",\ + "message":"Received an unsuccessful status code for request from inference entity id [id] status \ + [404]. 
Error message: [The model `rainbow-sprinkles` does not exist or you do not have access to it.]",\ + "type":"error"\ + }}""")); + } + }); + } + } + private ElasticInferenceService createServiceWithMockSender() { return createServiceWithMockSender(ElasticInferenceServiceAuthorizationTests.createEnabledAuth()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 1615d46b349e..580871bb2c9a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -13,6 +13,7 @@ import org.apache.logging.log4j.Level; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; @@ -46,12 +47,14 @@ import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingF import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError; import org.elasticsearch.xpack.core.ml.MachineLearningField; +import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; +import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentStats; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResultsTests; @@ -67,11 +70,13 @@ import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.chunking.WordBoundaryChunkingSettings; import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.EnumSet; @@ -81,12 +86,14 @@ import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction.Response.RESULTS_FIELD; import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettingsMap; import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID; import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86; @@ -101,6 +108,7 @@ import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class ElasticsearchInternalServiceTests extends ESTestCase { @@ -1632,6 +1640,67 @@ public class ElasticsearchInternalServiceTests extends ESTestCase { } } + public void testUpdateWithoutMlEnabled() throws IOException, InterruptedException { + var cs = mock(ClusterService.class); + var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(MachineLearningField.MAX_LAZY_ML_NODES)); + when(cs.getClusterSettings()).thenReturn(cSettings); + var context = new InferenceServiceExtension.InferenceServiceFactoryContext( + mock(), + threadPool, + cs, + Settings.builder().put("xpack.ml.enabled", false).build() + ); + try (var service = new ElasticsearchInternalService(context)) { + var models = List.of(mock(Model.class)); + var latch = new CountDownLatch(1); + service.updateModelsWithDynamicFields(models, ActionTestUtils.assertNoFailureListener(r -> { + latch.countDown(); + assertThat(r, Matchers.sameInstance(models)); + })); + assertTrue(latch.await(30, TimeUnit.SECONDS)); + } + } + + public void testUpdateWithMlEnabled() throws IOException, InterruptedException { + var deploymentId = "deploymentId"; + var model = mock(ElasticsearchInternalModel.class); + when(model.mlNodeDeploymentId()).thenReturn(deploymentId); + + AssignmentStats stats = mock(); + when(stats.getDeploymentId()).thenReturn(deploymentId); + when(stats.getNumberOfAllocations()).thenReturn(3); + + var client = mock(Client.class); + doAnswer(ans -> { + QueryPage<AssignmentStats> queryPage = new QueryPage<>(List.of(stats), 1, RESULTS_FIELD); + + GetDeploymentStatsAction.Response response = mock(); + when(response.getStats()).thenReturn(queryPage); + + ActionListener<GetDeploymentStatsAction.Response> listener = ans.getArgument(2); + listener.onResponse(response); + return null; + }).when(client).execute(eq(GetDeploymentStatsAction.INSTANCE), any(), any()); + when(client.threadPool()).thenReturn(threadPool); + + var cs = mock(ClusterService.class); + var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(MachineLearningField.MAX_LAZY_ML_NODES)); + when(cs.getClusterSettings()).thenReturn(cSettings); + var context = new InferenceServiceExtension.InferenceServiceFactoryContext( + client, + threadPool, + cs, + Settings.builder().put("xpack.ml.enabled", true).build() + ); + try (var service = new ElasticsearchInternalService(context)) { + List<Model> models = List.of(model); + var latch = new CountDownLatch(1); + service.updateModelsWithDynamicFields(models, ActionTestUtils.assertNoFailureListener(r -> latch.countDown())); + assertTrue(latch.await(30, TimeUnit.SECONDS)); + verify(model).updateNumAllocations(3); + } + } +
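The two tests above pin down the new guard in updateModelsWithDynamicFields: with xpack.ml.enabled false the service answers immediately with the models unchanged, and with ML enabled it fetches deployment stats and applies the reported allocation count (verify(model).updateNumAllocations(3)). A minimal sketch of that short-circuit shape; only the early-return behavior comes from this diff, the generic helper below is invented for illustration:

    import java.util.List;
    import java.util.function.Consumer;

    final class ModelUpdater {
        // Sketch: skip an expensive async enrichment when a feature flag is off.
        static <M> void updateModels(boolean mlEnabled, List<M> models, Consumer<List<M>> onResponse, Runnable fetchStats) {
            if (models.isEmpty() || mlEnabled == false) {
                onResponse.accept(models); // hand back the input unchanged
                return;
            }
            fetchStats.run(); // the deployment-stats path exercised by testUpdateWithMlEnabled
        }
    }

private ElasticsearchInternalService createService(Client client) { var cs = mock(ClusterService.class); var cSettings = new 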
     private ElasticsearchInternalService createService(Client client) {
         var cs = mock(ClusterService.class);
         var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(MachineLearningField.MAX_LAZY_ML_NODES));
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java
index ee93677538b3..34539042c1f0 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java
@@ -33,9 +33,11 @@ import org.elasticsearch.test.http.MockResponse;
 import org.elasticsearch.test.http.MockWebServer;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xcontent.ToXContent;
+import org.elasticsearch.xcontent.XContentFactory;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.core.inference.action.InferenceAction;
 import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingFloat;
+import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException;
 import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests;
 import org.elasticsearch.xpack.inference.external.http.HttpClientManager;
 import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender;
@@ -62,8 +64,10 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
+import static org.elasticsearch.ExceptionsHelper.unwrapCause;
 import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
+import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS;
 import static org.elasticsearch.xpack.inference.Utils.getInvalidModel;
 import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap;
 import static org.elasticsearch.xpack.inference.Utils.getRequestConfigMap;
@@ -85,6 +89,7 @@ import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.isA;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
@@ -1062,6 +1067,59 @@ public class OpenAiServiceTests extends ESTestCase {
         }
     }
 
+    public void testUnifiedCompletionError() throws Exception {
+        String responseJson = """
+            {
+                "error": {
+                    "message": "The model `gpt-4awero` does not exist or you do not have access to it.",
+                    "type": "invalid_request_error",
+                    "param": null,
+                    "code": "model_not_found"
+                }
+            }""";
+        webServer.enqueue(new MockResponse().setResponseCode(404).setBody(responseJson));
+
+        var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
+        try (var service = new OpenAiService(senderFactory, createWithEmptySettings(threadPool))) {
+            var model = OpenAiChatCompletionModelTests.createChatCompletionModel(getUrl(webServer), "org", "secret", "model", "user");
+            PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
+            service.unifiedCompletionInfer(
+                model,
+                UnifiedCompletionRequest.of(
+                    List.of(new UnifiedCompletionRequest.Message(new UnifiedCompletionRequest.ContentString("hello"), "user", null, null))
+                ),
+                InferenceAction.Request.DEFAULT_TIMEOUT,
+                listener
+            );
+
+            var result = listener.actionGet(TIMEOUT);
+
InferenceEventsAssertion.assertThat(result).hasFinishedStream().hasNoEvents().hasErrorMatching(e -> { + e = unwrapCause(e); + assertThat(e, isA(UnifiedChatCompletionException.class)); + try (var builder = XContentFactory.jsonBuilder()) { + ((UnifiedChatCompletionException) e).toXContentChunked(EMPTY_PARAMS).forEachRemaining(xContent -> { + try { + xContent.toXContent(builder, EMPTY_PARAMS); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + }); + var json = XContentHelper.convertToJson(BytesReference.bytes(builder), false, builder.contentType()); + + assertThat(json, is(""" + {\ + "error":{\ + "code":"model_not_found",\ + "message":"Received an unsuccessful status code for request from inference entity id [id] status \ + [404]. Error message: [The model `gpt-4awero` does not exist or you do not have access to it.]",\ + "type":"invalid_request_error"\ + }}""")); + } + }); + } + } + public void testInfer_StreamRequest() throws Exception { String responseJson = """ data: {\ diff --git a/x-pack/plugin/logsdb/build.gradle b/x-pack/plugin/logsdb/build.gradle index bef07258a8e3..917f8207d2d2 100644 --- a/x-pack/plugin/logsdb/build.gradle +++ b/x-pack/plugin/logsdb/build.gradle @@ -42,3 +42,13 @@ tasks.named("javaRestTest").configure { tasks.named('yamlRestTest') { usesDefaultDistribution() } + +tasks.named("yamlRestTest") { + if (buildParams.isSnapshotBuild() == false) { + systemProperty 'tests.rest.blacklist', [ + "60_synthetic_source_recovery/*" + ].join(',') + } +} + + diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source_recovery.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source_recovery.yml new file mode 100644 index 000000000000..cc2216997c6d --- /dev/null +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source_recovery.yml @@ -0,0 +1,261 @@ +--- +synthetic recovery for synthetic source mode index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_synthetic_recovery + body: + settings: + index: + mapping.source.mode: synthetic + + - do: + indices.get_settings: + index: test_synthetic_recovery + include_defaults: true + + - match: { test_synthetic_recovery.settings.index.mapping.source.mode: synthetic } + - match: { test_synthetic_recovery.defaults.index.recovery.use_synthetic_source: "true" } + +--- +synthetic recovery for stored source mode index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_stored_recovery + body: + settings: + index: + mapping.source.mode: stored + + - do: + indices.get_settings: + index: test_stored_recovery + include_defaults: true + + - match: { test_stored_recovery.settings.index.mapping.source.mode: stored } + - match: { test_stored_recovery.defaults.index.recovery.use_synthetic_source: "false" } + +--- +synthetic recovery for disabled source mode index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_disabled_recovery + body: + settings: + index: + mapping.source.mode: disabled + + - do: + indices.get_settings: + index: test_disabled_recovery + include_defaults: true + + - match: { test_disabled_recovery.settings.index.mapping.source.mode: disabled } + - match: { 
test_disabled_recovery.defaults.index.recovery.use_synthetic_source: "false" } + +--- +synthetic recovery for standard index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_standard_index_recovery + body: + settings: + index: + mode: standard + + - do: + indices.get_settings: + index: test_standard_index_recovery + include_defaults: true + + - match: { test_standard_index_recovery.defaults.index.recovery.use_synthetic_source: "false" } + +--- +synthetic recovery for logsdb index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_logsdb_index_recovery + body: + settings: + index: + mode: logsdb + + - do: + indices.get_settings: + index: test_logsdb_index_recovery + include_defaults: true + + - match: { test_logsdb_index_recovery.defaults.index.recovery.use_synthetic_source: "true" } + +--- +synthetic recovery for time_series index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_time_series_index_recovery + body: + settings: + index: + mode: time_series + routing_path: [ keyword ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + keyword: + type: keyword + time_series_dimension: true + + - do: + indices.get_settings: + index: test_time_series_index_recovery + include_defaults: true + + - match: { test_time_series_index_recovery.defaults.index.recovery.use_synthetic_source: "true" } + +--- +override synthetic recovery for synthetic source mode index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_synthetic_recovery_override + body: + settings: + index: + mapping.source.mode: synthetic + recovery.use_synthetic_source: false + + - do: + indices.get_settings: + index: test_synthetic_recovery_override + include_defaults: true + + - match: { test_synthetic_recovery_override.settings.index.mapping.source.mode: synthetic } + - match: { test_synthetic_recovery_override.settings.index.recovery.use_synthetic_source: "false" } + +--- +override synthetic recovery for stored source mode index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + catch: bad_request + indices.create: + index: test_stored_recovery_override + body: + settings: + index: + mapping.source.mode: stored + recovery.use_synthetic_source: true + +--- +override synthetic recovery for disabled source mode index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + catch: bad_request + indices.create: + index: test_disabled_recovery_override + body: + settings: + index: + mapping.source.mode: disabled + recovery.use_synthetic_source: true + +--- +override synthetic recovery for standard index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + catch: bad_request + indices.create: + index: test_standard_index_recovery_override + body: + settings: + index: + mode: standard + recovery.use_synthetic_source: true + +--- +override synthetic recovery for logsdb index: + - requires: + cluster_features: [ 
"mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_logsdb_index_recovery_override + body: + settings: + index: + mode: logsdb + recovery.use_synthetic_source: false + + - do: + indices.get_settings: + index: test_logsdb_index_recovery_override + include_defaults: true + + - match: { test_logsdb_index_recovery_override.settings.index.recovery.use_synthetic_source: "false" } + +--- +override synthetic recovery for time_series index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_time_series_index_recovery_override + body: + settings: + index: + mode: time_series + recovery.use_synthetic_source: false + routing_path: [ keyword ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + keyword: + type: keyword + time_series_dimension: true + + - do: + indices.get_settings: + index: test_time_series_index_recovery_override + include_defaults: true + + - match: { test_time_series_index_recovery_override.settings.index.recovery.use_synthetic_source: "false" } diff --git a/x-pack/plugin/migrate/build.gradle b/x-pack/plugin/migrate/build.gradle index 283362a637e7..f179a311e0fe 100644 --- a/x-pack/plugin/migrate/build.gradle +++ b/x-pack/plugin/migrate/build.gradle @@ -19,6 +19,7 @@ dependencies { testImplementation project(xpackModule('ccr')) testImplementation project(':modules:data-streams') testImplementation project(path: ':modules:reindex') + testImplementation project(path: ':modules:ingest-common') } addQaCheckDependencies(project) diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java index e22c26bd6b8b..37311ca57141 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java @@ -24,12 +24,17 @@ import org.elasticsearch.action.admin.indices.template.put.TransportPutComposabl import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.DeletePipelineTransportAction; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; @@ -38,12 +43,15 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.datastreams.DataStreamsPlugin; import 
org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.DateFieldMapper;
+import org.elasticsearch.ingest.common.IngestCommonPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.reindex.ReindexPlugin;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.transport.MockTransportService;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.migrate.MigratePlugin;
+import org.elasticsearch.xpack.migrate.MigrateTemplateRegistry;
+import org.junit.After;
 
 import java.io.IOException;
 import java.time.Instant;
@@ -56,19 +64,33 @@ import static java.lang.Boolean.parseBoolean;
 import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.equalTo;
 
 public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase {
+    @After
+    private void cleanupCluster() throws Exception {
+        safeGet(
+            clusterAdmin().execute(
+                DeletePipelineTransportAction.TYPE,
+                new DeletePipelineRequest(
+                    TEST_REQUEST_TIMEOUT,
+                    TEST_REQUEST_TIMEOUT,
+                    MigrateTemplateRegistry.REINDEX_DATA_STREAM_PIPELINE_NAME
+                )
+            )
+        );
+        super.cleanUpCluster();
+    }
 
     private static final String MAPPING = """
         {
           "_doc":{
             "dynamic":"strict",
             "properties":{
-              "foo1":{
-                "type":"text"
-              }
+              "foo1": {"type":"text"},
+              "@timestamp": {"type":"date"}
             }
           }
         }
@@ -76,23 +98,132 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase {
 
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return List.of(MigratePlugin.class, ReindexPlugin.class, MockTransportService.TestPlugin.class, DataStreamsPlugin.class);
+        return List.of(
+            MigratePlugin.class,
+            ReindexPlugin.class,
+            MockTransportService.TestPlugin.class,
+            DataStreamsPlugin.class,
+            IngestCommonPlugin.class
+        );
+    }
+
+    private static String DATA_STREAM_MAPPING = """
+        {
+          "dynamic": true,
+          "_data_stream_timestamp": {
+            "enabled": true
+          },
+          "properties": {
+            "@timestamp": {"type":"date"}
+          }
+        }
+        """;
+
+    public void testTimestamp0AddedIfMissing() {
+        var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT);
+        safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex)));
+
+        // add doc without timestamp
+        addDoc(sourceIndex, "{\"foo\":\"baz\"}");
+
+        // add timestamp to source mapping
+        indicesAdmin().preparePutMapping(sourceIndex).setSource(DATA_STREAM_MAPPING, XContentType.JSON).get();
+
+        // call reindex
+        var destIndex = safeGet(
+            client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex))
+        ).getDestIndex();
+
+        assertResponse(prepareSearch(destIndex), response -> {
+            Map<String, Object> sourceAsMap = response.getHits().getAt(0).getSourceAsMap();
+            assertEquals(Integer.valueOf(0), sourceAsMap.get(DEFAULT_TIMESTAMP_FIELD));
+        });
+    }
+
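The pipeline resource the registry installs (reindex-data-stream-pipeline.json) is not part of this diff, so the snippet below is only a guess at its shape, inferred from what this test and testTimestampNotAddedIfExists assert: a set processor with "override": false would write @timestamp = 0 when the field is missing and leave an existing value untouched.

public class ReindexPipelineSketch {
    // Hypothetical reconstruction, not the shipped resource. "override": false is
    // what makes the set processor a no-op when @timestamp is already present.
    public static final String GUESSED_PIPELINE = """
        {
          "processors": [
            { "set": { "field": "@timestamp", "value": 0, "override": false } }
          ],
          "version": 1
        }
        """;
}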
+    public void testTimestampNotAddedIfExists() {
+        var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT);
+        safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex)));
+
+        // add doc with timestamp
+        String time = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis());
+        var doc = String.format(Locale.ROOT, "{\"%s\":\"%s\"}", DEFAULT_TIMESTAMP_FIELD, time);
+        addDoc(sourceIndex, doc);
+
+        // add timestamp to source mapping
+        indicesAdmin().preparePutMapping(sourceIndex).setSource(DATA_STREAM_MAPPING, XContentType.JSON).get();
+
+        // call reindex
+        var destIndex = safeGet(
+            client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex))
+        ).getDestIndex();
+
+        assertResponse(prepareSearch(destIndex), response -> {
+            Map<String, Object> sourceAsMap = response.getHits().getAt(0).getSourceAsMap();
+            assertEquals(time, sourceAsMap.get(DEFAULT_TIMESTAMP_FIELD));
+        });
+    }
+
+    public void testCustomReindexPipeline() {
+        String customPipeline = """
+            {
+              "processors": [
+                {
+                  "set": {
+                    "field": "cheese",
+                    "value": "gorgonzola"
+                  }
+                }
+              ],
+              "version": 1000
+            }
+            """;
+
+        PutPipelineRequest putRequest = new PutPipelineRequest(
+            TEST_REQUEST_TIMEOUT,
+            TEST_REQUEST_TIMEOUT,
+            MigrateTemplateRegistry.REINDEX_DATA_STREAM_PIPELINE_NAME,
+            new BytesArray(customPipeline),
+            XContentType.JSON
+        );
+
+        safeGet(clusterAdmin().execute(PutPipelineTransportAction.TYPE, putRequest));
+
+        var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT);
+        safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex)));
+
+        // add doc with timestamp
+        String time = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis());
+        var doc = String.format(Locale.ROOT, "{\"%s\":\"%s\"}", DEFAULT_TIMESTAMP_FIELD, time);
+        addDoc(sourceIndex, doc);
+
+        // add timestamp to source mapping
+        indicesAdmin().preparePutMapping(sourceIndex).setSource(DATA_STREAM_MAPPING, XContentType.JSON).get();
+
+        String destIndex = safeGet(
+            client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex))
+        ).getDestIndex();
+
+        assertResponse(prepareSearch(destIndex), response -> {
+            Map<String, Object> sourceAsMap = response.getHits().getAt(0).getSourceAsMap();
+            assertEquals("gorgonzola", sourceAsMap.get("cheese"));
+            assertEquals(time, sourceAsMap.get(DEFAULT_TIMESTAMP_FIELD));
+        });
+    }
 
     public void testDestIndexDeletedIfExists() throws Exception {
         // empty source index
         var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT);
-        indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get();
+        safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex)));
 
         // dest index with docs
         var destIndex = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex);
-        indicesAdmin().create(new CreateIndexRequest(destIndex)).actionGet();
+        safeGet(indicesAdmin().create(new CreateIndexRequest(destIndex)));
         indexDocs(destIndex, 10);
-        indicesAdmin().refresh(new RefreshRequest(destIndex)).actionGet();
+        safeGet(indicesAdmin().refresh(new RefreshRequest(destIndex)));
         assertHitCount(prepareSearch(destIndex).setSize(0), 10);
 
         // call reindex
-        client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet();
+        safeGet(client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)));
 
         // verify that dest still exists, but is now empty
         assertTrue(indexExists(destIndex));
@@ -101,11 +232,12 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase {
 
     public void testDestIndexNameSet_noDotPrefix() throws Exception {
         var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT);
-        indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get();
+        safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex)));
 
         // call reindex
- var response = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet(); + var response = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); var expectedDestIndexName = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); assertEquals(expectedDestIndexName, response.getDestIndex()); @@ -114,11 +246,12 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { public void testDestIndexNameSet_withDotPrefix() throws Exception { var sourceIndex = "." + randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); // call reindex - var response = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet(); + var response = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); var expectedDestIndexName = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); assertEquals(expectedDestIndexName, response.getDestIndex()); @@ -128,13 +261,14 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { // source index with docs var numDocs = randomIntBetween(1, 100); var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); indexDocs(sourceIndex, numDocs); // call reindex - var response = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet(); - indicesAdmin().refresh(new RefreshRequest(response.getDestIndex())).actionGet(); + var response = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); + safeGet(indicesAdmin().refresh(new RefreshRequest(response.getDestIndex()))); // verify that dest contains docs assertHitCount(prepareSearch(response.getDestIndex()).setSize(0), numDocs); @@ -145,13 +279,13 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { // empty source index var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings))); // call reindex - client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); + safeGet(client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex))); // Assert that source index is now read-only but not verified read-only - GetSettingsResponse getSettingsResponse = admin().indices().getSettings(new GetSettingsRequest().indices(sourceIndex)).actionGet(); + GetSettingsResponse getSettingsResponse = safeGet(admin().indices().getSettings(new GetSettingsRequest().indices(sourceIndex))); assertTrue(parseBoolean(getSettingsResponse.getSetting(sourceIndex, IndexMetadata.SETTING_BLOCKS_WRITE))); assertFalse( parseBoolean(getSettingsResponse.getSetting(sourceIndex, MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey())) @@ -159,16 +293,15 @@ public class ReindexDatastreamIndexTransportActionIT 
extends ESIntegTestCase { // assert that write to source fails var indexReq = new IndexRequest(sourceIndex).source(jsonBuilder().startObject().field("field", "1").endObject()); - assertThrows(ClusterBlockException.class, () -> client().index(indexReq).actionGet()); + expectThrows(ClusterBlockException.class, client().index(indexReq)); assertHitCount(prepareSearch(sourceIndex).setSize(0), 0); } public void testMissingSourceIndex() { var nonExistentSourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - assertThrows( + expectThrows( ResourceNotFoundException.class, - () -> client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(nonExistentSourceIndex)) - .actionGet() + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(nonExistentSourceIndex)) ); } @@ -177,7 +310,7 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { var numShards = randomIntBetween(1, 10); var staticSettings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards).build(); var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, staticSettings)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, staticSettings))); // update with a dynamic setting var numReplicas = randomIntBetween(0, 10); @@ -186,31 +319,32 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval) .build(); - indicesAdmin().updateSettings(new UpdateSettingsRequest(dynamicSettings, sourceIndex)).actionGet(); + safeGet(indicesAdmin().updateSettings(new UpdateSettingsRequest(dynamicSettings, sourceIndex))); // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); // assert both static and dynamic settings set on dest index - var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); + var settingsResponse = safeGet(indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex))); assertEquals(numReplicas, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_REPLICAS))); assertEquals(numShards, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_SHARDS))); assertEquals(refreshInterval, settingsResponse.getSetting(destIndex, IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey())); } - public void testMappingsAddedToDestIndex() throws Exception { + public void testMappingsAddedToDestIndex() { var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(MAPPING)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(MAPPING))); // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); 
- var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) - .actionGet(); + var mappingsResponse = safeGet( + indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) + ); Map mappings = mappingsResponse.mappings(); var destMappings = mappings.get(destIndex).sourceAsMap(); var sourceMappings = mappings.get(sourceIndex).sourceAsMap(); @@ -223,13 +357,13 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { public void testFailIfMetadataBlockSet() { var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); var settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_METADATA, true).build(); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings))); - try { - client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); - } catch (ElasticsearchException e) { - assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); - } + ElasticsearchException e = expectThrows( + ElasticsearchException.class, + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); + assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); cleanupMetadataBlocks(sourceIndex); } @@ -237,13 +371,13 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { public void testFailIfReadBlockSet() { var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); var settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_READ, true).build(); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings))); - try { - client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); - } catch (ElasticsearchException e) { - assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); - } + ElasticsearchException e = expectThrows( + ElasticsearchException.class, + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); + assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); cleanupMetadataBlocks(sourceIndex); } @@ -255,14 +389,14 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { .put(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE, randomBoolean()) .put(IndexMetadata.SETTING_BLOCKS_WRITE, randomBoolean()) .build(); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings))); // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); - var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); + var 
settingsResponse = safeGet(indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex))); assertFalse(parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY))); assertFalse(parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE))); assertFalse(parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_BLOCKS_WRITE))); @@ -284,11 +418,11 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { assertAcked(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); - var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(sourceIndex, destIndex)).actionGet(); + var settingsResponse = safeGet(indicesAdmin().getSettings(new GetSettingsRequest().indices(sourceIndex, destIndex))); var destSettings = settingsResponse.getIndexToSettings().get(destIndex); assertEquals( @@ -317,33 +451,34 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { .build(); var request = new TransportPutComposableIndexTemplateAction.Request("logs-template"); request.indexTemplate(template); - client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); + safeGet(client().execute(TransportPutComposableIndexTemplateAction.TYPE, request)); var sourceIndex = "logs-" + randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); { var indexRequest = new IndexRequest(sourceIndex); indexRequest.source("{ \"foo1\": \"cheese\" }", XContentType.JSON); - client().index(indexRequest).actionGet(); + safeGet(client().index(indexRequest)); } // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); // verify settings from templates copied to dest index { - var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); + var settingsResponse = safeGet(indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex))); assertEquals(numReplicas, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_REPLICAS))); assertEquals(numShards, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_SHARDS))); } // verify mappings from templates copied to dest index { - var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) - .actionGet(); + var mappingsResponse = safeGet( + indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) + ); var destMappings = mappingsResponse.mappings().get(destIndex).sourceAsMap(); var sourceMappings = mappingsResponse.mappings().get(sourceIndex).sourceAsMap(); assertEquals(sourceMappings, destMappings); @@ -404,7 +539,7 @@ public class 
ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) .build() ); - client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); + safeGet(client().execute(TransportPutComposableIndexTemplateAction.TYPE, request)); // index doc Instant time = Instant.now(); @@ -412,12 +547,11 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { { var indexRequest = new IndexRequest("k8s").opType(DocWriteRequest.OpType.CREATE); indexRequest.source(TSDB_DOC.replace("$time", formatInstant(time)), XContentType.JSON); - var indexResponse = client().index(indexRequest).actionGet(); + var indexResponse = safeGet(client().index(indexRequest)); backingIndexName = indexResponse.getIndex(); } - var sourceSettings = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndexName)) - .actionGet() + var sourceSettings = safeGet(indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndexName))) .getSettings() .get(backingIndexName); Instant startTime = IndexSettings.TIME_SERIES_START_TIME.get(sourceSettings); @@ -430,17 +564,15 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { // force a rollover so can call reindex and delete var rolloverRequest = new RolloverRequest("k8s", null); - var rolloverResponse = indicesAdmin().rolloverIndex(rolloverRequest).actionGet(); + var rolloverResponse = safeGet(indicesAdmin().rolloverIndex(rolloverRequest)); rolloverResponse.getNewIndex(); // call reindex on the original backing index - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(backingIndexName)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(backingIndexName)) + ).getDestIndex(); - var destSettings = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(destIndex)) - .actionGet() - .getSettings() + var destSettings = safeGet(indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(destIndex))).getSettings() .get(destIndex); var destStart = IndexSettings.TIME_SERIES_START_TIME.get(destSettings); var destEnd = IndexSettings.TIME_SERIES_END_TIME.get(destSettings); @@ -458,7 +590,7 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { .putNull(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE) .putNull(IndexMetadata.SETTING_BLOCKS_METADATA) .build(); - assertAcked(indicesAdmin().updateSettings(new UpdateSettingsRequest(settings, index)).actionGet()); + safeGet(indicesAdmin().updateSettings(new UpdateSettingsRequest(settings, index))); } private static void indexDocs(String index, int numDocs) { @@ -471,7 +603,7 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { .source(String.format(Locale.ROOT, "{\"%s\":\"%s\"}", DEFAULT_TIMESTAMP_FIELD, value), XContentType.JSON) ); } - BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet(); + BulkResponse bulkResponse = safeGet(client().bulk(bulkRequest)); assertThat(bulkResponse.getItems().length, equalTo(numDocs)); } @@ -479,12 +611,9 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant); } - private static String getIndexUUID(String index) { - return 
indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(index))
-            .actionGet()
-            .getSettings()
-            .get(index)
-            .get(IndexMetadata.SETTING_INDEX_UUID);
+    void addDoc(String index, String doc) {
+        BulkRequest bulkRequest = new BulkRequest();
+        bulkRequest.add(new IndexRequest(index).opType(DocWriteRequest.OpType.CREATE).source(doc, XContentType.JSON));
+        safeGet(client().bulk(bulkRequest));
     }
-
 }
diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java
index f5f8beba26d8..7811e84ac9f5 100644
--- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java
+++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java
@@ -55,6 +55,7 @@ import org.elasticsearch.xpack.migrate.task.ReindexDataStreamTask;
 import org.elasticsearch.xpack.migrate.task.ReindexDataStreamTaskParams;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 import java.util.function.Predicate;
 import java.util.function.Supplier;
@@ -64,6 +65,18 @@ import static org.elasticsearch.xpack.migrate.action.ReindexDataStreamIndexTrans
 import static org.elasticsearch.xpack.migrate.task.ReindexDataStreamPersistentTaskExecutor.MAX_CONCURRENT_INDICES_REINDEXED_PER_DATA_STREAM_SETTING;
 
 public class MigratePlugin extends Plugin implements ActionPlugin, PersistentTaskPlugin {
+    @Override
+    public Collection<?> createComponents(PluginServices services) {
+        var registry = new MigrateTemplateRegistry(
+            services.environment().settings(),
+            services.clusterService(),
+            services.threadPool(),
+            services.client(),
+            services.xContentRegistry()
+        );
+        registry.initialize();
+        return List.of(registry);
+    }
 
     @Override
     public List<RestHandler> getRestHandlers(
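For readers unfamiliar with IndexTemplateRegistry: the REGISTRY_VERSION comment in the new class below relies on the registry only (re)installing a resource whose stored version is absent or lower than its own, which is why a user pipeline saved with a higher version wins. A minimal sketch of that precedence rule as I understand it; the real check lives in x-pack core and may differ in details.

public class PipelineVersionGate {
    // The registry reinstalls its built-in pipeline only when the cluster's copy
    // is missing or older than REGISTRY_VERSION.
    static boolean shouldInstallBuiltInPipeline(Integer existingVersion, int registryVersion) {
        return existingVersion == null || existingVersion < registryVersion;
    }

    public static void main(String[] args) {
        // A user pipeline saved with "version": 1000 (as testCustomReindexPipeline
        // does above) is left untouched while REGISTRY_VERSION is 1.
        System.out.println(shouldInstallBuiltInPipeline(1000, 1)); // false
        System.out.println(shouldInstallBuiltInPipeline(null, 1)); // true
    }
}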
diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigrateTemplateRegistry.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigrateTemplateRegistry.java
new file mode 100644
index 000000000000..2a9dc97e1635
--- /dev/null
+++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigrateTemplateRegistry.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+package org.elasticsearch.xpack.migrate;
+
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xcontent.NamedXContentRegistry;
+import org.elasticsearch.xpack.core.ClientHelper;
+import org.elasticsearch.xpack.core.template.IndexTemplateRegistry;
+import org.elasticsearch.xpack.core.template.IngestPipelineConfig;
+import org.elasticsearch.xpack.core.template.JsonIngestPipelineConfig;
+
+import java.util.List;
+
+public class MigrateTemplateRegistry extends IndexTemplateRegistry {
+
+    // This number must be incremented when we make changes to the built-in pipeline.
+    // If a specific user pipeline is needed instead, its version should be set to a value higher than the REGISTRY_VERSION.
+    static final int REGISTRY_VERSION = 1;
+    public static final String REINDEX_DATA_STREAM_PIPELINE_NAME = "reindex-data-stream-pipeline";
+    private static final String TEMPLATE_VERSION_VARIABLE = "xpack.migrate.reindex.pipeline.version";
+
+    public MigrateTemplateRegistry(
+        Settings nodeSettings,
+        ClusterService clusterService,
+        ThreadPool threadPool,
+        Client client,
+        NamedXContentRegistry xContentRegistry
+    ) {
+        super(nodeSettings, clusterService, threadPool, client, xContentRegistry);
+    }
+
+    @Override
+    protected List<IngestPipelineConfig> getIngestPipelines() {
+        return List.of(
+            new JsonIngestPipelineConfig(
+                REINDEX_DATA_STREAM_PIPELINE_NAME,
+                "/" + REINDEX_DATA_STREAM_PIPELINE_NAME + ".json",
+                REGISTRY_VERSION,
+                TEMPLATE_VERSION_VARIABLE
+            )
+        );
+    }
+
+    @Override
+    protected String getOrigin() {
+        return ClientHelper.STACK_ORIGIN;
+    }
+}
diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java
index df4bc5c4ba55..ef1e2b9a85c7 100644
--- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java
+++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java
@@ -53,6 +53,7 @@ import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.deprecation.DeprecatedIndexPredicate;
+import org.elasticsearch.xpack.migrate.MigrateTemplateRegistry;
 
 import java.util.Locale;
 import java.util.Map;
@@ -272,6 +273,7 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio
         logger.debug("Reindex to destination index [{}] from source index [{}]", destIndexName, sourceIndexName);
         var reindexRequest = new ReindexRequest();
         reindexRequest.setSourceIndices(sourceIndexName);
+        reindexRequest.setDestPipeline(MigrateTemplateRegistry.REINDEX_DATA_STREAM_PIPELINE_NAME);
         reindexRequest.getSearchRequest().allowPartialSearchResults(false);
         reindexRequest.getSearchRequest().source().fetchSource(true);
         reindexRequest.setDestIndex(destIndexName);
diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
index a63d911e9d40..e33fe677179d 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
@@ -109,7 +109,7 @@ public class MachineLearningPackageLoader extends Plugin implements ActionPlugin
     @Override
     public BootstrapCheckResult check(BootstrapContext context) {
         try {
-            validateModelRepository(MODEL_REPOSITORY.get(context.settings()), context.environment().configFile());
+            validateModelRepository(MODEL_REPOSITORY.get(context.settings()), context.environment().configDir());
         } catch (Exception e) {
             return BootstrapCheckResult.failure(
                 "Found an invalid configuration for xpack.ml.model_repository. 
" diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcessFactory.java index 5e24393be0a2..6e377770ed0e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcessFactory.java @@ -149,7 +149,7 @@ public class NativeAnalyticsProcessFactory implements AnalyticsProcessFactory scheduledEventToRuleWriters = scheduledEvents.stream() @@ -252,8 +252,8 @@ public class AutodetectBuilder { } private void buildJobConfig(List command) throws IOException { - FileUtils.recreateTempDirectoryIfNeeded(env.tmpFile()); - Path configFile = Files.createTempFile(env.tmpFile(), "config", JSON_EXTENSION); + FileUtils.recreateTempDirectoryIfNeeded(env.tmpDir()); + Path configFile = Files.createTempFile(env.tmpDir(), "config", JSON_EXTENSION); filesToDelete.add(configFile); try ( OutputStreamWriter osw = new OutputStreamWriter(Files.newOutputStream(configFile), StandardCharsets.UTF_8); @@ -271,8 +271,8 @@ public class AutodetectBuilder { if (referencedFilters.isEmpty()) { return; } - FileUtils.recreateTempDirectoryIfNeeded(env.tmpFile()); - Path filtersConfigFile = Files.createTempFile(env.tmpFile(), "filtersConfig", JSON_EXTENSION); + FileUtils.recreateTempDirectoryIfNeeded(env.tmpDir()); + Path filtersConfigFile = Files.createTempFile(env.tmpDir(), "filtersConfig", JSON_EXTENSION); filesToDelete.add(filtersConfigFile); try ( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java index df97b39d2e39..594f72398bc9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java @@ -52,7 +52,7 @@ public class NativeStorageProvider { */ public void cleanupLocalTmpStorageInCaseOfUncleanShutdown() { try { - for (Path p : environment.dataFiles()) { + for (Path p : environment.dataDirs()) { IOUtils.rm(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER)); } } catch (Exception e) { @@ -79,7 +79,7 @@ public class NativeStorageProvider { } private Path tryAllocateStorage(String uniqueIdentifier, ByteSizeValue requestedSize) { - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { try { if (getUsableSpace(path) >= requestedSize.getBytes() + minLocalStorageAvailable.getBytes()) { Path tmpDirectory = path.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER).resolve(uniqueIdentifier); @@ -97,7 +97,7 @@ public class NativeStorageProvider { public boolean localTmpStorageHasEnoughSpace(Path path, ByteSizeValue requestedSize) { Path realPath = path.toAbsolutePath(); - for (Path p : environment.dataFiles()) { + for (Path p : environment.dataDirs()) { try { if (realPath.startsWith(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER))) { return getUsableSpace(p) >= requestedSize.getBytes() + minLocalStorageAvailable.getBytes(); @@ -122,7 +122,7 @@ public class NativeStorageProvider { if (path != null) { // do not allow to breakout from the tmp storage provided Path realPath = path.toAbsolutePath(); - for (Path p : environment.dataFiles()) { + 
for (Path p : environment.dataDirs()) { if (realPath.startsWith(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER))) { IOUtils.rm(path); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/ProcessPipes.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/ProcessPipes.java index 6b09e38b02ea..9f8378a5b008 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/ProcessPipes.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/ProcessPipes.java @@ -94,7 +94,7 @@ public class ProcessPipes { ) { this.namedPipeHelper = namedPipeHelper; this.jobId = jobId; - this.tempDir = env.tmpFile(); + this.tempDir = env.tmpDir(); this.timeout = timeout; // The way the pipe names are formed MUST match what is done in the controller main() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelper.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelper.java index 6a5e328d7530..84b00aca81f7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelper.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelper.java @@ -78,7 +78,7 @@ public class NamedPipeHelper { // All these factors need to align for everything to work in production. If any changes // are made here then CNamedPipeFactory::defaultPath() in the C++ code will probably // also need to be changed. - return env.tmpFile().toString() + PathUtils.getDefaultFileSystem().getSeparator(); + return env.tmpDir().toString() + PathUtils.getDefaultFileSystem().getSeparator(); } /** diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/NativeStorageProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/NativeStorageProviderTests.java index f2a4add8444b..22a6ff630f2b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/NativeStorageProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/NativeStorageProviderTests.java @@ -123,7 +123,7 @@ public class NativeStorageProviderTests extends ESTestCase { private NativeStorageProvider createNativeStorageProvider(Map paths) throws IOException { Environment environment = mock(Environment.class); - when(environment.dataFiles()).thenReturn(paths.keySet().toArray(new Path[paths.size()])); + when(environment.dataDirs()).thenReturn(paths.keySet().toArray(new Path[paths.size()])); NativeStorageProvider storageProvider = spy(new NativeStorageProvider(environment, ByteSizeValue.ofGb(5))); doAnswer( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperTests.java index d09f0cbb59c1..fc1b5abc04fb 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperTests.java @@ -67,7 +67,7 @@ public class NamedPipeHelperTests extends ESTestCase { Environment env = TestEnvironment.newEnvironment( Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build() ); - Path tempFile = Files.createTempFile(env.tmpFile(), "not a named pipe", null); + Path tempFile = Files.createTempFile(env.tmpDir(), "not a named pipe", null); IOException ioe = ESTestCase.expectThrows( IOException.class, @@ -83,7 +83,7 @@ 
public class NamedPipeHelperTests extends ESTestCase { Environment env = TestEnvironment.newEnvironment( Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build() ); - Path tempFile = Files.createTempFile(env.tmpFile(), "not a named pipe", null); + Path tempFile = Files.createTempFile(env.tmpDir(), "not a named pipe", null); IOException ioe = ESTestCase.expectThrows( IOException.class, diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java index 2d8ddfbebc0f..4dfc5dd5e016 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java @@ -823,7 +823,7 @@ public class SearchableSnapshotsIntegTests extends BaseSearchableSnapshotsIntegT final String tmpRepositoryName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); createRepositoryNoVerify(tmpRepositoryName, "fs"); final Path repoPath = internalCluster().getCurrentMasterNodeInstance(Environment.class) - .resolveRepoFile( + .resolveRepoDir( clusterAdmin().prepareGetRepositories(TEST_REQUEST_TIMEOUT, tmpRepositoryName) .get() .repositories() diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java index c955457b78d6..3534988b25ce 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java @@ -145,7 +145,7 @@ public class SearchableSnapshotsPrewarmingIntegTests extends ESSingleNodeTestCas docsPerIndex.put(indexName, nbDocs); } - final Path repositoryPath = node().getEnvironment().resolveRepoFile(randomAlphaOfLength(10)); + final Path repositoryPath = node().getEnvironment().resolveRepoDir(randomAlphaOfLength(10)); final Settings.Builder repositorySettings = Settings.builder().put("location", repositoryPath); if (randomBoolean()) { repositorySettings.put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java index 53ea908ad880..3d4d7f768c1b 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java @@ -98,7 +98,7 @@ public class FrozenIndexInputTests extends AbstractSearchableSnapshotsTestCase { .put("path.home", createTempDir()) .build(); final Environment 
environment = TestEnvironment.newEnvironment(settings); - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { Files.createDirectories(path); } SnapshotId snapshotId = new SnapshotId("_name", "_uuid"); diff --git a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java index 3994fb50c7fc..dbe0e0b0e957 100644 --- a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java @@ -163,7 +163,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { final boolean inEnrollmentMode = options.has(enrollmentTokenParam); // skipping security auto-configuration because node considered as restarting. - for (Path dataPath : env.dataFiles()) { + for (Path dataPath : env.dataDirs()) { if (Files.isDirectory(dataPath) && false == isDirEmpty(dataPath)) { final String msg = "Skipping security auto configuration because it appears that the node is not starting up for the " + "first time. The node might already be part of a cluster and this auto setup utility is designed to configure " @@ -173,7 +173,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { } // pre-flight checks for the files that are going to be changed - final Path ymlPath = env.configFile().resolve("elasticsearch.yml"); + final Path ymlPath = env.configDir().resolve("elasticsearch.yml"); // it is odd for the `elasticsearch.yml` file to be missing or not be a regular (the node won't start) // but auto configuration should not be concerned with fixing it (by creating the file) and let the node startup fail if (false == Files.exists(ymlPath) || false == Files.isRegularFile(ymlPath, LinkOption.NOFOLLOW_LINKS)) { @@ -194,7 +194,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { ); notifyOfFailure(inEnrollmentMode, terminal, Terminal.Verbosity.NORMAL, ExitCodes.NOOP, msg); } - final Path keystorePath = KeyStoreWrapper.keystorePath(env.configFile()); + final Path keystorePath = KeyStoreWrapper.keystorePath(env.configDir()); // Inform that auto-configuration will not run if keystore cannot be read. if (Files.exists(keystorePath) && (false == Files.isRegularFile(keystorePath, LinkOption.NOFOLLOW_LINKS) || false == Files.isReadable(keystorePath))) { @@ -218,7 +218,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { checkExistingConfiguration(env.settings(), inEnrollmentMode, terminal); final ZonedDateTime autoConfigDate = ZonedDateTime.now(ZoneOffset.UTC); - final Path tempGeneratedTlsCertsDir = env.configFile() + final Path tempGeneratedTlsCertsDir = env.configDir() .resolve(String.format(Locale.ROOT, TLS_GENERATED_CERTS_DIR_NAME + ".%d.tmp", autoConfigDate.toInstant().getEpochSecond())); try { // it is useful to pre-create the sub-config dir in order to check that the config dir is writable and that file owners match @@ -247,12 +247,12 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { // If the node process works OK given the owner of the config dir, it should also tolerate the auto-created config dir, // provided that they both have the same owner and permissions. 
final UserPrincipal newFileOwner = Files.getOwner(tempGeneratedTlsCertsDir, LinkOption.NOFOLLOW_LINKS); - if (false == newFileOwner.equals(Files.getOwner(env.configFile(), LinkOption.NOFOLLOW_LINKS))) { + if (false == newFileOwner.equals(Files.getOwner(env.configDir(), LinkOption.NOFOLLOW_LINKS))) { // the following is only printed once, if the node starts successfully UserException userException = new UserException( ExitCodes.CONFIG, "Aborting auto configuration because of config dir ownership mismatch. Config dir is owned by " - + Files.getOwner(env.configFile(), LinkOption.NOFOLLOW_LINKS).getName() + + Files.getOwner(env.configDir(), LinkOption.NOFOLLOW_LINKS).getName() + " but auto-configuration directory would be owned by " + newFileOwner.getName() ); @@ -496,7 +496,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { } // save the existing keystore before replacing - final Path keystoreBackupPath = env.configFile() + final Path keystoreBackupPath = env.configDir() .resolve( String.format(Locale.ROOT, KeyStoreWrapper.KEYSTORE_FILENAME + ".%d.orig", autoConfigDate.toInstant().getEpochSecond()) ); @@ -514,7 +514,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { } final SetOnce<SecureString> nodeKeystorePassword = new SetOnce<>(); - try (KeyStoreWrapper nodeKeystore = KeyStoreWrapper.bootstrap(env.configFile(), () -> { + try (KeyStoreWrapper nodeKeystore = KeyStoreWrapper.bootstrap(env.configDir(), () -> { nodeKeystorePassword.set(new SecureString(terminal.readSecret(""))); return nodeKeystorePassword.get().clone(); })) { @@ -581,7 +581,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { nodeKeystore.setString("xpack.security.http.ssl.keystore.secure_password", httpKeystorePassword.getChars()); } // finally overwrites the node keystore (if the keystores have been successfully written) - nodeKeystore.save(env.configFile(), nodeKeystorePassword.get() == null ? new char[0] : nodeKeystorePassword.get().getChars()); + nodeKeystore.save(env.configDir(), nodeKeystorePassword.get() == null ? new char[0] : nodeKeystorePassword.get().getChars()); } catch (Throwable t) { // restore keystore to revert possible keystore bootstrap try { @@ -614,10 +614,10 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { try { // all certs and keys have been generated in the temp certs dir, therefore: // 1. backup (move) any previously existing tls certs dir (this backup is NOT removed when auto-conf finishes) - if (Files.exists(env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME))) { + if (Files.exists(env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME))) { moveDirectory( - env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME), - env.configFile() + env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME), + env.configDir() .resolve( String.format( Locale.ROOT, @@ -628,7 +628,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { ); } // 2.
move the newly populated temp certs dir to its permanent static dir name - moveDirectory(tempGeneratedTlsCertsDir, env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME)); + moveDirectory(tempGeneratedTlsCertsDir, env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME)); } catch (Throwable t) { // restore keystore to revert possible keystore bootstrap try { @@ -649,7 +649,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { // revert any previously existing TLS certs try { if (Files.exists( - env.configFile() + env.configDir() .resolve( String.format( Locale.ROOT, @@ -659,7 +659,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { ) )) { moveDirectory( - env.configFile() + env.configDir() .resolve( String.format( Locale.ROOT, @@ -667,7 +667,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { autoConfigDate.toInstant().getEpochSecond() ) ), - env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME) + env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME) ); } } catch (Exception ex) { @@ -686,7 +686,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { final Environment localFinalEnv = env; final DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("dd-MM-yyyy HH:mm:ss", Locale.ROOT); List<String> existingConfigLines = Files.readAllLines(ymlPath, StandardCharsets.UTF_8); - fullyWriteFile(env.configFile(), "elasticsearch.yml", true, stream -> { + fullyWriteFile(env.configDir(), "elasticsearch.yml", true, stream -> { try (BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(stream, StandardCharsets.UTF_8))) { // start with the existing config lines for (String line : existingConfigLines) { @@ -827,16 +827,16 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { } try { // this removes a statically named directory, so it is potentially dangerous - deleteDirectory(env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME)); + deleteDirectory(env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME)); } catch (Exception ex) { t.addSuppressed(ex); } - Path backupCertsDir = env.configFile() + Path backupCertsDir = env.configDir() .resolve( String.format(Locale.ROOT, TLS_GENERATED_CERTS_DIR_NAME + ".%d.orig", autoConfigDate.toInstant().getEpochSecond()) ); if (Files.exists(backupCertsDir)) { - moveDirectory(backupCertsDir, env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME)); + moveDirectory(backupCertsDir, env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME)); } throw t; } @@ -887,14 +887,14 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { // with --enrollment-token token, in the first place. final List<String> existingConfigLines; try { - existingConfigLines = Files.readAllLines(env.configFile().resolve("elasticsearch.yml"), StandardCharsets.UTF_8); + existingConfigLines = Files.readAllLines(env.configDir().resolve("elasticsearch.yml"), StandardCharsets.UTF_8); } catch (IOException e) { // This shouldn't happen; we would have failed earlier, but we need to catch the exception throw new UserException(ExitCodes.IO_ERROR, "Aborting enrolling to cluster.
Unable to read elasticsearch.yml.", e); } final List<String> existingConfigWithoutAutoconfiguration = removePreviousAutoconfiguration(existingConfigLines); if (false == existingConfigLines.equals(existingConfigWithoutAutoconfiguration) - && Files.exists(env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME))) { + && Files.exists(env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME))) { terminal.println(""); terminal.println("This node will be reconfigured to join an existing cluster, using the enrollment token that you provided."); terminal.println("This operation will overwrite the existing configuration. Specifically: "); @@ -907,7 +907,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { } removeAutoConfigurationFromKeystore(env, terminal); try { - fullyWriteFile(env.configFile(), "elasticsearch.yml", true, stream -> { + fullyWriteFile(env.configDir(), "elasticsearch.yml", true, stream -> { try (BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(stream, StandardCharsets.UTF_8))) { for (String l : existingConfigWithoutAutoconfiguration) { bw.write(l); @@ -915,7 +915,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { } } }); - deleteDirectory(env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME)); + deleteDirectory(env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME)); } catch (Throwable t) { throw new UserException( ExitCodes.IO_ERROR, @@ -1262,9 +1262,9 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { } private static void removeAutoConfigurationFromKeystore(Environment env, Terminal terminal) throws UserException { - if (Files.exists(KeyStoreWrapper.keystorePath(env.configFile()))) { + if (Files.exists(KeyStoreWrapper.keystorePath(env.configDir()))) { try ( - KeyStoreWrapper existingKeystore = KeyStoreWrapper.load(env.configFile()); + KeyStoreWrapper existingKeystore = KeyStoreWrapper.load(env.configDir()); SecureString keystorePassword = existingKeystore.hasPassword() ?
new SecureString(terminal.readSecret("Enter password for the elasticsearch keystore: ")) : new SecureString(new char[0]); @@ -1288,7 +1288,7 @@ public class AutoConfigureNode extends EnvironmentAwareCommand { } existingKeystore.remove(setting); } - existingKeystore.save(env.configFile(), keystorePassword.getChars()); + existingKeystore.save(env.configDir(), keystorePassword.getChars()); } catch (Exception e) { terminal.errorPrintln(Terminal.Verbosity.VERBOSE, ""); terminal.errorPrintln(Terminal.Verbosity.VERBOSE, ExceptionsHelper.stackTrace(e)); diff --git a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommand.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommand.java index b67bb9898991..0e96911405b3 100644 --- a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommand.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommand.java @@ -508,7 +508,7 @@ class HttpCertificateCommand extends EnvironmentAwareCommand { map.put("DATE", now.format(DateTimeFormatter.ISO_LOCAL_DATE)); map.put("TIME", now.format(DateTimeFormatter.ISO_OFFSET_TIME)); map.put("VERSION", Version.CURRENT.toString()); - map.put("CONF_DIR", env.configFile().toAbsolutePath().toString()); + map.put("CONF_DIR", env.configDir().toAbsolutePath().toString()); map.putAll(entries); return map; } @@ -1116,7 +1116,7 @@ class HttpCertificateCommand extends EnvironmentAwareCommand { private static Path requestPath(String prompt, Terminal terminal, Environment env, boolean requireExisting) { for (;;) { final String input = terminal.readText(prompt); - final Path path = env.configFile().resolve(input).toAbsolutePath(); + final Path path = env.configDir().resolve(input).toAbsolutePath(); if (path.getFileName() == null) { terminal.println(Terminal.Verbosity.SILENT, input + " is not a valid file"); diff --git a/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/AutoConfigureNodeTests.java b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/AutoConfigureNodeTests.java index 129d85d0818b..a03d9a7822e8 100644 --- a/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/AutoConfigureNodeTests.java +++ b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/AutoConfigureNodeTests.java @@ -311,7 +311,7 @@ public class AutoConfigureNodeTests extends ESTestCase { SecureString httpKeystorePassword = nodeKeystore.getString("xpack.security.http.ssl.keystore.secure_password"); SecureString transportKeystorePassword = nodeKeystore.getString("xpack.security.transport.ssl.keystore.secure_password"); - final Settings newSettings = Settings.builder().loadFromPath(env.configFile().resolve("elasticsearch.yml")).build(); + final Settings newSettings = Settings.builder().loadFromPath(env.configDir().resolve("elasticsearch.yml")).build(); final String httpKeystorePath = newSettings.get("xpack.security.http.ssl.keystore.path"); final String transportKeystorePath = newSettings.get("xpack.security.transport.ssl.keystore.path"); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java index 44f7a6d47e36..b1fda5f6c4e6 100644 --- 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java @@ -55,7 +55,7 @@ public class SSLReloadDuringStartupIntegTests extends SecurityIntegTestCase { Environment tmpEnv = TestEnvironment.newEnvironment(settings); // For each node, copy the original testnode.jks into each node's config directory. - Path nodeKeystorePath = tmpEnv.configFile().resolve("testnode.jks"); + Path nodeKeystorePath = tmpEnv.configDir().resolve("testnode.jks"); try { Path goodKeystorePath = getDataPath(goodKeyStoreFilePath); Files.copy(goodKeystorePath, nodeKeystorePath, StandardCopyOption.REPLACE_EXISTING); @@ -93,7 +93,7 @@ public class SSLReloadDuringStartupIntegTests extends SecurityIntegTestCase { final Environment env = internalCluster().getInstance(Environment.class, nodeName); final CountDownLatch beforeKeystoreFix = new CountDownLatch(2); // SYNC: Cert update & ES restart final CountDownLatch afterKeystoreFix = new CountDownLatch(1); // SYNC: Verify cluster after cert update - final Path nodeKeystorePath = env.configFile().resolve("testnode.jks"); // all nodes have good keystore + final Path nodeKeystorePath = env.configDir().resolve("testnode.jks"); // all nodes have good keystore final Path badKeystorePath = getDataPath(badKeyStoreFilePath); // stop a node, and apply this bad keystore final Path goodKeystorePath = getDataPath(goodKeyStoreFilePath); // start the node, and apply this good keystore assertTrue(Files.exists(nodeKeystorePath)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 3b94795a5746..d0c159aab81e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -730,9 +730,9 @@ public class Security extends Plugin * ES has already checked the file is actually in the config directory */ public static Path resolveSecuredConfigFile(Environment env, String file) { - Path config = env.configFile().resolve(file); + Path config = env.configDir().resolve(file); if (doPrivileged((PrivilegedAction) () -> Files.exists(config)) == false) { - Path legacyConfig = env.configFile().resolve("x-pack").resolve(file); + Path legacyConfig = env.configDir().resolve("x-pack").resolve(file); if (doPrivileged((PrivilegedAction) () -> Files.exists(legacyConfig))) { DeprecationLogger.getLogger(XPackPlugin.class) .warn( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/ResetPasswordTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/ResetPasswordTool.java index 0718742d362c..f04c670eb1ea 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/ResetPasswordTool.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/ResetPasswordTool.java @@ -43,7 +43,7 @@ class ResetPasswordTool extends BaseRunAsSuperuserCommand { private final OptionSpec usernameOption; ResetPasswordTool() { - this(CommandLineHttpClient::new, environment -> KeyStoreWrapper.load(environment.configFile())); + this(CommandLineHttpClient::new, environment -> KeyStoreWrapper.load(environment.configDir())); } protected 
ResetPasswordTool( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java index 91c75c076881..3c7fa029d451 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java @@ -95,11 +95,11 @@ class SetupPasswordTool extends MultiCommand { SetupPasswordTool() { this(environment -> new CommandLineHttpClient(environment), environment -> { - KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(environment.configFile()); + KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(environment.configDir()); if (keyStoreWrapper == null) { throw new UserException( ExitCodes.CONFIG, - "Elasticsearch keystore file is missing [" + KeyStoreWrapper.keystorePath(environment.configFile()) + "]" + "Elasticsearch keystore file is missing [" + KeyStoreWrapper.keystorePath(environment.configDir()) + "]" ); } return keyStoreWrapper; @@ -142,7 +142,7 @@ class SetupPasswordTool extends MultiCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { - terminal.println(Verbosity.VERBOSE, "Running with configuration path: " + env.configFile()); + terminal.println(Verbosity.VERBOSE, "Running with configuration path: " + env.configDir()); setupOptions(terminal, options, env); checkElasticKeystorePasswordValid(terminal, env); checkClusterHealth(terminal); @@ -198,7 +198,7 @@ class SetupPasswordTool extends MultiCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { - terminal.println(Verbosity.VERBOSE, "Running with configuration path: " + env.configFile()); + terminal.println(Verbosity.VERBOSE, "Running with configuration path: " + env.configDir()); setupOptions(terminal, options, env); checkElasticKeystorePasswordValid(terminal, env); checkClusterHealth(terminal); @@ -298,7 +298,7 @@ class SetupPasswordTool extends MultiCommand { Settings settings = settingsBuilder.build(); elasticUserPassword = ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.get(settings); - final Environment newEnv = new Environment(settings, env.configFile()); + final Environment newEnv = new Environment(settings, env.configDir()); Environment.assertEquivalent(newEnv, env); client = clientFunction.apply(newEnv); @@ -354,7 +354,7 @@ class SetupPasswordTool extends MultiCommand { terminal.errorPrintln("Possible causes include:"); terminal.errorPrintln(" * The password for the '" + elasticUser + "' user has already been changed on this cluster"); terminal.errorPrintln(" * Your elasticsearch node is running against a different keystore"); - terminal.errorPrintln(" This tool used the keystore at " + KeyStoreWrapper.keystorePath(env.configFile())); + terminal.errorPrintln(" This tool used the keystore at " + KeyStoreWrapper.keystorePath(env.configDir())); terminal.errorPrintln(""); terminal.errorPrintln( "You can use the `elasticsearch-reset-password` CLI tool to reset the password of the '" + elasticUser + "' user" diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java index 
8b3f8ec09675..0fafd6b63c03 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java @@ -338,7 +338,7 @@ public class JwtUtil { } public static Path resolvePath(final Environment environment, final String jwkSetPath) { - final Path directoryPath = environment.configFile(); + final Path directoryPath = environment.configDir(); return directoryPath.resolve(jwkSetPath); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java index d5ef90f7f166..65e72568cacf 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java @@ -101,7 +101,7 @@ public final class KerberosRealm extends Realm implements CachingRealm { } this.kerberosTicketValidator = kerberosTicketValidator; this.threadPool = threadPool; - this.keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + this.keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); validateKeytab(this.keytabPath); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java index aa1946f44567..65d2492e3b6b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java @@ -365,7 +365,7 @@ public class OpenIdConnectAuthenticator { * @throws IOException if the file cannot be read */ private JWKSet readJwkSetFromFile(String jwkSetPath) throws IOException, ParseException { - final Path path = realmConfig.env().configFile().resolve(jwkSetPath); + final Path path = realmConfig.env().configDir().resolve(jwkSetPath); // avoid using JWKSet.loadFile() as it does not close FileInputStream internally try { String jwkSet = AccessController.doPrivileged( @@ -814,7 +814,7 @@ public class OpenIdConnectAuthenticator { } private void setMetadataFileWatcher(String jwkSetPath) throws IOException { - final Path path = realmConfig.env().configFile().resolve(jwkSetPath); + final Path path = realmConfig.env().configDir().resolve(jwkSetPath); FileWatcher watcher = new PrivilegedFileWatcher(path); watcher.addListener(new FileListener(LOGGER, () -> this.idTokenValidator.set(createIdTokenValidator(false)))); watcherService.add(watcher, ResourceWatcherService.Frequency.MEDIUM); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommand.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommand.java index 59e4280bb774..f1ea40eedbaf 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommand.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommand.java @@ -94,7 +94,7 @@ class SamlMetadataCommand extends KeyStoreAwareCommand { 
SamlMetadataCommand() { this((environment) -> { - KeyStoreWrapper ksWrapper = KeyStoreWrapper.load(environment.configFile()); + KeyStoreWrapper ksWrapper = KeyStoreWrapper.load(environment.configDir()); return ksWrapper; }); } @@ -459,7 +459,7 @@ class SamlMetadataCommand extends KeyStoreAwareCommand { final RealmConfig.RealmIdentifier identifier = new RealmConfig.RealmIdentifier(SingleSpSamlRealmSettings.TYPE, name); final Settings realmSettings = realms.get(identifier); if (realmSettings == null) { - throw new UserException(ExitCodes.CONFIG, "No such realm '" + name + "' defined in " + env.configFile()); + throw new UserException(ExitCodes.CONFIG, "No such realm '" + name + "' defined in " + env.configDir()); } if (isSamlRealm(identifier)) { return buildRealm(identifier, env, settings); @@ -472,10 +472,10 @@ class SamlMetadataCommand extends KeyStoreAwareCommand { .filter(entry -> isSamlRealm(entry.getKey())) .toList(); if (saml.isEmpty()) { - throw new UserException(ExitCodes.CONFIG, "There is no SAML realm configured in " + env.configFile()); + throw new UserException(ExitCodes.CONFIG, "There is no SAML realm configured in " + env.configDir()); } if (saml.size() > 1) { - terminal.errorPrintln("Using configuration in " + env.configFile()); + terminal.errorPrintln("Using configuration in " + env.configDir()); terminal.errorPrintln( "Found multiple SAML realms: " + saml.stream().map(Map.Entry::getKey).map(Object::toString).collect(Collectors.joining(", ")) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java index 874e10a0a6f1..641e4b8b7089 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java @@ -780,7 +780,7 @@ public final class SamlRealm extends Realm implements Releasable { ) throws ResolverException, ComponentInitializationException, IOException, PrivilegedActionException { final String entityId = require(config, IDP_ENTITY_ID); - final Path path = config.env().configFile().resolve(metadataPath); + final Path path = config.env().configDir().resolve(metadataPath); final FilesystemMetadataResolver resolver = new SamlFilesystemMetadataResolver(path.toFile()); for (var httpSetting : List.of(IDP_METADATA_HTTP_REFRESH, IDP_METADATA_HTTP_MIN_REFRESH, IDP_METADATA_HTTP_FAIL_ON_ERROR)) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHash.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHash.java index 8cce453f17fd..eaaa413f46de 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHash.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHash.java @@ -48,10 +48,10 @@ class AutoConfigGenerateElasticPasswordHash extends KeyStoreAwareCommand { final Hasher hasher = Hasher.resolve(XPackSettings.PASSWORD_HASHING_ALGORITHM.get(env.settings())); try ( SecureString elasticPassword = new SecureString(generatePassword(20)); - KeyStoreWrapper nodeKeystore = KeyStoreWrapper.bootstrap(env.configFile(), () -> new SecureString(new char[0])) + KeyStoreWrapper nodeKeystore = 
KeyStoreWrapper.bootstrap(env.configDir(), () -> new SecureString(new char[0])) ) { nodeKeystore.setString(AUTOCONFIG_ELASTIC_PASSWORD_HASH.getKey(), hasher.hash(elasticPassword)); - nodeKeystore.save(env.configFile(), new char[0]); + nodeKeystore.save(env.configDir(), new char[0]); terminal.print(Terminal.Verbosity.NORMAL, elasticPassword.toString()); } catch (Exception e) { throw new UserException(ExitCodes.CANT_CREATE, "Failed to generate a password for the elastic user", e); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/CreateEnrollmentTokenTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/CreateEnrollmentTokenTool.java index 919f4531734f..8f5fc96761cc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/CreateEnrollmentTokenTool.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/CreateEnrollmentTokenTool.java @@ -36,7 +36,7 @@ class CreateEnrollmentTokenTool extends BaseRunAsSuperuserCommand { CreateEnrollmentTokenTool() { this( environment -> new CommandLineHttpClient(environment), - environment -> KeyStoreWrapper.load(environment.configFile()), + environment -> KeyStoreWrapper.load(environment.configDir()), environment -> new ExternalEnrollmentTokenGenerator(environment) ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/BaseRunAsSuperuserCommand.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/BaseRunAsSuperuserCommand.java index 2f45bafe493b..542bbbe086cc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/BaseRunAsSuperuserCommand.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/BaseRunAsSuperuserCommand.java @@ -93,7 +93,7 @@ public abstract class BaseRunAsSuperuserCommand extends KeyStoreAwareCommand { settingsBuilder.setSecureSettings(keyStoreWrapper); } settings = settingsBuilder.build(); - newEnv = new Environment(settings, env.configFile()); + newEnv = new Environment(settings, env.configDir()); } else { newEnv = env; settings = env.settings(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java index 0ed6d92fd551..3ad55d5f6469 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java @@ -69,7 +69,7 @@ public class TransportKibanaEnrollmentActionTests extends ESTestCase { final Path tempDir = createTempDir(); final Path httpCaPath = tempDir.resolve("httpCa.p12"); Files.copy(getDataPath("/org/elasticsearch/xpack/security/action/enrollment/httpCa.p12"), httpCaPath); - when(env.configFile()).thenReturn(tempDir); + when(env.configDir()).thenReturn(tempDir); final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("keystore.secure_password", "password"); final Settings settings = Settings.builder().put("keystore.path", httpCaPath).setSecureSettings(secureSettings).build(); diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java index c85684a60e44..62af3d74410c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java @@ -78,7 +78,7 @@ public class TransportNodeEnrollmentActionTests extends ESTestCase { Path transportPath = tempDir.resolve("transport.p12"); Files.copy(getDataPath("/org/elasticsearch/xpack/security/action/enrollment/httpCa.p12"), httpCaPath); Files.copy(getDataPath("/org/elasticsearch/xpack/security/action/enrollment/transport.p12"), transportPath); - when(env.configFile()).thenReturn(tempDir); + when(env.configDir()).thenReturn(tempDir); final SSLService sslService = mock(SSLService.class); final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("keystore.secure_password", "password"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java index b84282bd4066..417725d908b4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java @@ -72,7 +72,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { } public void testStore_ConfiguredWithUnreadableFile() throws Exception { - Path configDir = env.configFile(); + Path configDir = env.configDir(); Files.createDirectories(configDir); Path file = configDir.resolve("users"); @@ -88,7 +88,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { public void testStore_AutoReload() throws Exception { Path users = getDataPath("users"); - Path configDir = env.configFile(); + Path configDir = env.configDir(); Files.createDirectories(configDir); Path file = configDir.resolve("users"); Files.copy(users, file, StandardCopyOption.REPLACE_EXISTING); @@ -149,7 +149,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { public void testStore_AutoReload_WithParseFailures() throws Exception { Path users = getDataPath("users"); - Path confDir = env.configFile(); + Path confDir = env.configDir(); Files.createDirectories(confDir); Path testUsers = confDir.resolve("users"); Files.copy(users, testUsers, StandardCopyOption.REPLACE_EXISTING); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java index 258770b10c74..759f57a4e017 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java @@ -285,7 +285,7 @@ public class FileUserRolesStoreTests extends ESTestCase { } private Path getUsersRolesPath() throws IOException { - Path xpackConf = env.configFile(); + Path xpackConf = env.configDir(); Files.createDirectories(xpackConf); return 
xpackConf.resolve("users_roles"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoaderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoaderTests.java index 9800cb73faf6..3d05b7540596 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoaderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoaderTests.java @@ -41,7 +41,7 @@ public class JwkSetLoaderTests extends ESTestCase { final RealmConfig realmConfig = mock(RealmConfig.class); when(realmConfig.getSetting(JwtRealmSettings.PKC_JWKSET_PATH)).thenReturn("jwkset.json"); final Environment env = mock(Environment.class); - when(env.configFile()).thenReturn(tempDir); + when(env.configDir()).thenReturn(tempDir); when(realmConfig.env()).thenReturn(env); final JwkSetLoader jwkSetLoader = spy(new JwkSetLoader(realmConfig, List.of(), null)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidatorTests.java index f1927876eba5..2c9e57df60e2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidatorTests.java @@ -59,7 +59,7 @@ public class JwtSignatureValidatorTests extends ESTestCase { final RealmConfig realmConfig = mock(RealmConfig.class); when(realmConfig.getSetting(JwtRealmSettings.PKC_JWKSET_PATH)).thenReturn("jwkset.json"); final Environment env = mock(Environment.class); - when(env.configFile()).thenReturn(tempDir); + when(env.configDir()).thenReturn(tempDir); when(realmConfig.env()).thenReturn(env); validateSignatureAttemptCounter = new AtomicInteger(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java index f01914a7fed0..b15edd943db5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java @@ -63,7 +63,7 @@ public class KerberosRealmAuthenticateFailedTests extends KerberosRealmTestCase final boolean throwExceptionForInvalidTicket = validTicket ? 
false : randomBoolean(); final boolean throwLoginException = randomBoolean(); final byte[] decodedTicket = randomByteArrayOfLength(5); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); if (validTicket) { mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); @@ -144,7 +144,7 @@ public class KerberosRealmAuthenticateFailedTests extends KerberosRealmTestCase settings = Settings.builder().put(settings).putList("authorization_realms", "other_realm").build(); final KerberosRealm kerberosRealm = createKerberosRealm(Collections.singletonList(otherRealm), username); final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, "out-token"), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java index b1ddb631a8dd..c6431a8d8168 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java @@ -48,7 +48,7 @@ public class KerberosRealmCacheTests extends KerberosRealmTestCase { metadata.put(KerberosRealm.KRB_METADATA_UPN_KEY, username); final User expectedUser = new User(expectedUsername, roles.toArray(new String[0]), null, null, metadata, true); final byte[] decodedTicket = randomByteArrayOfLength(10); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); @@ -78,7 +78,7 @@ public class KerberosRealmCacheTests extends KerberosRealmTestCase { final String authNUsername = randomFrom(userNames); final byte[] decodedTicket = randomByteArrayOfLength(10); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, 
keytabPath, krbDebug, new Tuple<>(authNUsername, outToken), null); final String expectedUsername = maybeRemoveRealmName(authNUsername); @@ -137,7 +137,7 @@ public class KerberosRealmCacheTests extends KerberosRealmTestCase { metadata.put(KerberosRealm.KRB_METADATA_UPN_KEY, username); final User expectedUser = new User(expectedUsername, roles.toArray(new String[0]), null, null, metadata, true); final byte[] decodedTicket = randomByteArrayOfLength(10); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java index 177507ce6d79..e4718f3e9501 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java @@ -82,7 +82,7 @@ public class KerberosRealmTests extends KerberosRealmTestCase { metadata.put(KerberosRealm.KRB_METADATA_UPN_KEY, username); final User expectedUser = new User(expectedUsername, roles.toArray(new String[roles.size()]), null, null, metadata, true); final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, "out-token"), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); @@ -106,7 +106,7 @@ public class KerberosRealmTests extends KerberosRealmTestCase { final String username = randomPrincipalName(); final KerberosRealm kerberosRealm = createKerberosRealm(username); final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>("does-not-exist@REALM", "out-token"), null); @@ -236,7 +236,7 @@ public class KerberosRealmTests extends KerberosRealmTestCase { final KerberosRealm kerberosRealm = createKerberosRealm(Collections.singletonList(otherRealm), username); final User expectedUser = lookupUser; final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); - final Path keytabPath = 
config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, "out-token"), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/FileServiceAccountTokenStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/FileServiceAccountTokenStoreTests.java index 00b55e5b4833..0f2a720660af 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/FileServiceAccountTokenStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/FileServiceAccountTokenStoreTests.java @@ -128,7 +128,7 @@ public class FileServiceAccountTokenStoreTests extends ESTestCase { public void testAutoReload() throws Exception { Path serviceTokensSourceFile = getDataPath("service_tokens"); - Path configDir = env.configFile(); + Path configDir = env.configDir(); Files.createDirectories(configDir); Path targetFile = configDir.resolve("service_tokens"); Files.copy(serviceTokensSourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING); @@ -225,7 +225,7 @@ public class FileServiceAccountTokenStoreTests extends ESTestCase { public void testFindTokensFor() throws IOException { Path serviceTokensSourceFile = getDataPath("service_tokens"); - Path configDir = env.configFile(); + Path configDir = env.configDir(); Files.createDirectories(configDir); Path targetFile = configDir.resolve("service_tokens"); Files.copy(serviceTokensSourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java index 6332e63ca595..ee025fe64ff9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java @@ -76,8 +76,8 @@ public class DnRoleMapperTests extends ESTestCase { public void init() throws IOException { settings = Settings.builder().put("resource.reload.interval.high", "100ms").put("path.home", createTempDir()).build(); env = TestEnvironment.newEnvironment(settings); - if (Files.exists(env.configFile()) == false) { - Files.createDirectory(env.configFile()); + if (Files.exists(env.configDir()) == false) { + Files.createDirectory(env.configDir()); } threadPool = new TestThreadPool("test"); } @@ -100,7 +100,7 @@ public class DnRoleMapperTests extends ESTestCase { public void testMapper_AutoReload() throws Exception { Path roleMappingFile = getDataPath("role_mapping.yml"); - Path file = env.configFile().resolve("test_role_mapping.yml"); + Path file = env.configDir().resolve("test_role_mapping.yml"); Files.copy(roleMappingFile, file, StandardCopyOption.REPLACE_EXISTING); final CountDownLatch latch = new CountDownLatch(1); @@ -144,7 +144,7 @@ public class DnRoleMapperTests extends ESTestCase { public void 
testMapper_AutoReload_WithParseFailures() throws Exception { Path roleMappingFile = getDataPath("role_mapping.yml"); - Path file = env.configFile().resolve("test_role_mapping.yml"); + Path file = env.configDir().resolve("test_role_mapping.yml"); Files.copy(roleMappingFile, file, StandardCopyOption.REPLACE_EXISTING); final CountDownLatch latch = new CountDownLatch(1); @@ -171,7 +171,7 @@ public class DnRoleMapperTests extends ESTestCase { public void testMapperAutoReloadWithoutListener() throws Exception { Path roleMappingFile = getDataPath("role_mapping.yml"); - Path file = env.configFile().resolve("test_role_mapping.yml"); + Path file = env.configDir().resolve("test_role_mapping.yml"); Files.copy(roleMappingFile, file, StandardCopyOption.REPLACE_EXISTING); try (ResourceWatcherService watcherService = new ResourceWatcherService(settings, threadPool)) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/FileOperatorUsersStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/FileOperatorUsersStoreTests.java index 34cfde8dc862..972c00b59b1f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/FileOperatorUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/FileOperatorUsersStoreTests.java @@ -484,7 +484,7 @@ public class FileOperatorUsersStoreTests extends ESTestCase { } private Path getOperatorUsersPath() throws IOException { - Path xpackConf = env.configFile(); + Path xpackConf = env.configDir(); Files.createDirectories(xpackConf); return xpackConf.resolve("operator_users.yml"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java index 9bb0c8af6f48..2ac2d4ebf0c3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java @@ -375,7 +375,7 @@ public class SSLErrorMessageFileTests extends ESTestCase { + " [" + fileName + "] because access to read the file is blocked; SSL resources should be placed in the [" - + env.configFile().toAbsolutePath().toString() + + env.configDir().toAbsolutePath().toString() + "] directory"; Throwable exception = expectFailure(settings); @@ -477,7 +477,7 @@ public class SSLErrorMessageFileTests extends ESTestCase { private ElasticsearchException expectFailure(Settings.Builder settings) { return expectThrows( ElasticsearchException.class, - () -> new SSLService(new Environment(buildEnvSettings(settings.build()), env.configFile())) + () -> new SSLService(new Environment(buildEnvSettings(settings.build()), env.configDir())) ); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java index 337fc00cc7ca..093959978b0d 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java @@ -112,8 +112,8 @@ public class WatcherScheduleEngineBenchmark { Node node = new Node( internalNodeEnv, PluginsLoader.createPluginsLoader( - 
PluginsLoader.loadModulesBundles(internalNodeEnv.modulesFile()), - PluginsLoader.loadPluginsBundles(internalNodeEnv.pluginsFile()), + PluginsLoader.loadModulesBundles(internalNodeEnv.modulesDir()), + PluginsLoader.loadPluginsBundles(internalNodeEnv.pluginsDir()), Map.of() ) ).start() diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index 65f2282014dc..a3f50089d542 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -31,7 +31,9 @@ restResources { tasks.named("yamlRestTest").configure { ArrayList<String> blacklist = [ 'index/10_with_id/Index with ID', - 'indices.get_alias/10_basic/Get alias against closed indices' + 'indices.get_alias/10_basic/Get alias against closed indices', + 'update/100_synthetic_source/keyword', + 'update/100_synthetic_source/stored text' ]; if (buildParams.isSnapshotBuild() == false) { blacklist += [ diff --git a/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java b/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java index 261bc567d5c9..74cb057278c4 100644 --- a/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java +++ b/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java @@ -152,7 +152,7 @@ public abstract class KerberosTestCase extends ESTestCase { protected Path getKeytabPath(Environment env) { final Setting<String> setting = KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.getConcreteSettingForNamespace(REALM_NAME); - return env.configFile().resolve(setting.get(settings)); + return env.configDir().resolve(setting.get(settings)); } /** diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java index e55e35ae0932..6f72382078a2 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java @@ -195,7 +195,7 @@ public class DataStreamsUpgradeIT extends AbstractUpgradeTestCase { createDataStreamFromNonDataStreamIndices(dataStreamFromNonDataStreamIndices); } else if (CLUSTER_TYPE == ClusterType.UPGRADED) { upgradeDataStream(dataStreamName, numRollovers, numRollovers + 1, 0); - upgradeDataStream(dataStreamFromNonDataStreamIndices, 0, 0, 1); + upgradeDataStream(dataStreamFromNonDataStreamIndices, 0, 1, 0); } } diff --git a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHashTests.java b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHashTests.java index 95c3fd4fde91..a5330d3daf92 100644 --- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHashTests.java +++ b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHashTests.java @@ -97,18 +97,18 @@ public class AutoConfigGenerateElasticPasswordHashTests extends CommandTestCase public void testSuccessfullyGenerateAndStoreHash() throws Exception { execute(); assertThat(terminal.getOutput(), hasLength(20)); -
KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(env.configFile()); + KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(env.configDir()); assertNotNull(keyStoreWrapper); keyStoreWrapper.decrypt(new char[0]); assertThat(keyStoreWrapper.getSettingNames(), containsInAnyOrder(AUTOCONFIG_ELASTIC_PASSWORD_HASH.getKey(), "keystore.seed")); } public void testExistingKeystoreWithWrongPassword() throws Exception { - KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(env.configFile()); + KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(env.configDir()); assertNotNull(keyStoreWrapper); keyStoreWrapper.decrypt(new char[0]); // set a random password so that we fail to decrypt it in GenerateElasticPasswordHash#execute - keyStoreWrapper.save(env.configFile(), randomAlphaOfLength(16).toCharArray()); + keyStoreWrapper.save(env.configDir(), randomAlphaOfLength(16).toCharArray()); UserException e = expectThrows(UserException.class, this::execute); assertThat(e.getMessage(), equalTo("Failed to generate a password for the elastic user")); assertThat(terminal.getOutput(), is(emptyString()));
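
Taken together, the hunks above are one mechanical rename of the Environment path accessors: configFile() becomes configDir(), dataFiles() becomes dataDirs(), tmpFile() becomes tmpDir(), resolveRepoFile() becomes resolveRepoDir(), and modulesFile()/pluginsFile() become modulesDir()/pluginsDir(). Each accessor returns a directory that callers resolve file names against, and the new names say so. Below is a minimal sketch of the calling pattern the renamed API expects; it is not code from this change. TouchKeystoreSketch and its empty-password handling are hypothetical, while the Environment and KeyStoreWrapper calls (configDir(), dataDirs(), keystorePath(Path), load(Path), decrypt(char[]), save(Path, char[])) are the same ones exercised throughout the diff.

    import java.nio.file.Files;
    import java.nio.file.Path;
    import org.elasticsearch.common.settings.KeyStoreWrapper;
    import org.elasticsearch.env.Environment;

    // Hypothetical sketch, not part of this change: re-saves an existing keystore
    // and pre-creates the data directories using the renamed directory accessors.
    class TouchKeystoreSketch {
        static void touch(Environment env) throws Exception {
            final Path configDir = env.configDir(); // was env.configFile()
            final Path keystorePath = KeyStoreWrapper.keystorePath(configDir);
            if (Files.exists(keystorePath) == false) {
                throw new IllegalStateException("no keystore at " + keystorePath);
            }
            try (KeyStoreWrapper keyStore = KeyStoreWrapper.load(configDir)) {
                // assumes an unprotected keystore, as in the tests above
                keyStore.decrypt(new char[0]);
                // save() takes the config directory, not the keystore file itself
                keyStore.save(configDir, new char[0]);
            }
            for (Path dataDir : env.dataDirs()) { // was env.dataFiles()
                Files.createDirectories(dataDir);
            }
        }
    }

Note the asymmetry the rename makes explicit: KeyStoreWrapper.keystorePath(), load(), bootstrap(), and save() all take the config directory and resolve the keystore file name internally, which is why every call site in this diff keeps passing env.configDir() rather than a path to the keystore file.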