Use logs dir as working directory (#124966)
In the unexpected case that Elasticsearch dies due to a segfault or other similar native issue, a core dump is useful in diagnosing the problem. Yet core dumps are written to the working directory, which is read-only for most installations of Elasticsearch. This commit changes the working directory to the logs dir which should always be writeable.
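The mechanics are simple: the server CLI launches the real Elasticsearch JVM as a child process, and the child's working directory determines where files written via relative paths (core dumps, `hs_err_pid*.log`, `gc.log`) end up. Below is a minimal, self-contained sketch of that idea using `ProcessBuilder`; the directory name and the launched command are illustrative, not the actual Elasticsearch launcher code.

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class WorkingDirSketch {
    public static void main(String[] args) throws IOException, InterruptedException {
        // Hypothetical logs directory standing in for the node's path.logs.
        Path logsDir = Files.createDirectories(Path.of("logs"));

        // Start a child JVM with the logs dir as its working directory, so anything
        // it writes via a relative path (core dumps, hs_err_pid*.log) lands there.
        ProcessBuilder builder = new ProcessBuilder("java", "-version");
        builder.directory(logsDir.toFile()); // the essence of this commit
        builder.redirectErrorStream(true);
        builder.redirectOutput(ProcessBuilder.Redirect.INHERIT);

        int exitCode = builder.start().waitFor();
        System.out.println("child exited with " + exitCode);
    }
}
```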
Parent: d82886fc52
Commit: 3bac50e818
20 changed files with 177 additions and 89 deletions
@@ -179,9 +179,9 @@ echo "Running elasticsearch \$0"
file(distProjectFolder, 'src/config/elasticsearch.properties') << "some propes"
file(distProjectFolder, 'src/config/jvm.options') << """
-Xlog:gc*,gc+age=trace,safepoint:file=logs/gc.log:utctime,level,pid,tags:filecount=32,filesize=64m
-XX:ErrorFile=logs/hs_err_pid%p.log
-XX:HeapDumpPath=data
-Xlog:gc*,gc+age=trace,safepoint:file=gc.log:utctime,level,pid,tags:filecount=32,filesize=64m
-XX:ErrorFile=hs_err_pid%p.log
# -XX:HeapDumpPath=/heap/dump/path
"""
file(distProjectFolder, 'build.gradle') << """
import org.gradle.api.internal.artifacts.ArtifactAttributes;
@@ -1435,12 +1435,17 @@ public class ElasticsearchNode implements TestClusterConfiguration {
Path jvmOptions = configFileRoot.resolve("jvm.options");
try {
String content = new String(Files.readAllBytes(jvmOptions));
Map<String, String> expansions = jvmOptionExpansions();
for (String origin : expansions.keySet()) {
if (content.contains(origin) == false) {
throw new IOException("template property " + origin + " not found in template.");
Map<ReplacementKey, String> expansions = jvmOptionExpansions();
for (var entry : expansions.entrySet()) {
ReplacementKey replacement = entry.getKey();
String key = replacement.key();
if (content.contains(key) == false) {
key = replacement.fallback();
if (content.contains(key) == false) {
throw new IOException("Template property '" + replacement + "' not found in template:\n" + content);
}
}
content = content.replace(origin, expansions.get(origin));
content = content.replace(key, entry.getValue());
}
Files.write(jvmOptions, content.getBytes());
} catch (IOException ioException) {

@@ -1448,17 +1453,39 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
}
private Map<String, String> jvmOptionExpansions() {
Map<String, String> expansions = new HashMap<>();
private record ReplacementKey(String key, String fallback) {}
private Map<ReplacementKey, String> jvmOptionExpansions() {
Map<ReplacementKey, String> expansions = new HashMap<>();
Version version = getVersion();
String heapDumpOrigin = getVersion().onOrAfter("6.3.0") ? "-XX:HeapDumpPath=data" : "-XX:HeapDumpPath=/heap/dump/path";
expansions.put(heapDumpOrigin, "-XX:HeapDumpPath=" + confPathLogs);
if (version.onOrAfter("6.2.0")) {
expansions.put("logs/gc.log", confPathLogs.resolve("gc.log").toString());
ReplacementKey heapDumpPathSub;
if (version.before("8.19.0") && version.onOrAfter("6.3.0")) {
heapDumpPathSub = new ReplacementKey("-XX:HeapDumpPath=data", null);
} else {
// temporarily fall back to the old substitution so both old and new work during backport
heapDumpPathSub = new ReplacementKey("# -XX:HeapDumpPath=/heap/dump/path", "-XX:HeapDumpPath=data");
}
if (getVersion().getMajor() >= 7) {
expansions.put("-XX:ErrorFile=logs/hs_err_pid%p.log", "-XX:ErrorFile=" + confPathLogs.resolve("hs_err_pid%p.log"));
expansions.put(heapDumpPathSub, "-XX:HeapDumpPath=" + confPathLogs);
ReplacementKey gcLogSub;
if (version.before("8.19.0") && version.onOrAfter("6.2.0")) {
gcLogSub = new ReplacementKey("logs/gc.log", null);
} else {
// temporarily check the old substitution first so both old and new work during backport
gcLogSub = new ReplacementKey("logs/gc.log", "gc.log");
}
expansions.put(gcLogSub, confPathLogs.resolve("gc.log").toString());
ReplacementKey errorFileSub;
if (version.before("8.19.0") && version.getMajor() >= 7) {
errorFileSub = new ReplacementKey("-XX:ErrorFile=logs/hs_err_pid%p.log", null);
} else {
// temporarily check the old substitution first so both old and new work during backport
errorFileSub = new ReplacementKey("-XX:ErrorFile=logs/hs_err_pid%p.log", "-XX:ErrorFile=hs_err_pid%p.log");
}
expansions.put(errorFileSub, "-XX:ErrorFile=" + confPathLogs.resolve("hs_err_pid%p.log"));
return expansions;
}
@@ -531,7 +531,6 @@ subprojects {
final String packagingPathData = "path.data: /var/lib/elasticsearch"
final String pathLogs = "/var/log/elasticsearch"
final String packagingPathLogs = "path.logs: ${pathLogs}"
final String packagingLoggc = "${pathLogs}/gc.log"
String licenseText
if (isTestDistro) {

@@ -576,23 +575,6 @@ subprojects {
'rpm': packagingPathLogs,
'def': '#path.logs: /path/to/logs'
],
'loggc': [
'deb': packagingLoggc,
'rpm': packagingLoggc,
'def': 'logs/gc.log'
],
'heap.dump.path': [
'deb': "-XX:HeapDumpPath=/var/lib/elasticsearch",
'rpm': "-XX:HeapDumpPath=/var/lib/elasticsearch",
'def': "-XX:HeapDumpPath=data"
],
'error.file': [
'deb': "-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log",
'rpm': "-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log",
'def': "-XX:ErrorFile=logs/hs_err_pid%p.log"
],
'scripts.footer': [
/* Debian needs exit 0 on these scripts so we add it here and preserve
@@ -77,10 +77,10 @@
# specify an alternative path for heap dumps; ensure the directory exists and
# has sufficient space
@heap.dump.path@
# -XX:HeapDumpPath=/heap/dump/path
# specify an alternative path for JVM fatal error logs
@error.file@
-XX:ErrorFile=hs_err_pid%p.log
## GC logging
-Xlog:gc*,gc+age=trace,safepoint:file=@loggc@:utctime,level,pid,tags:filecount=32,filesize=64m
-Xlog:gc*,gc+age=trace,safepoint:file=gc.log:utctime,level,pid,tags:filecount=32,filesize=64m
@@ -58,7 +58,7 @@ class CliToolLauncher {
String toolname = getToolName(pinfo.sysprops());
String libs = pinfo.sysprops().getOrDefault("cli.libs", "");
command = CliToolProvider.load(toolname, libs).create();
command = CliToolProvider.load(pinfo.sysprops(), toolname, libs).create();
Terminal terminal = Terminal.DEFAULT;
Runtime.getRuntime().addShutdownHook(createShutdownHook(terminal, command));
@@ -10,12 +10,14 @@
package org.elasticsearch.server.cli;
import org.elasticsearch.common.Strings;
import org.elasticsearch.core.SuppressForbidden;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Locale;

@@ -106,7 +108,9 @@ class JvmOption {
userDefinedJvmOptions.stream(),
Stream.of("-XX:+PrintFlagsFinal", "-version")
).flatMap(Function.identity()).toList();
final Process process = new ProcessBuilder().command(command).start();
final ProcessBuilder builder = new ProcessBuilder().command(command);
setWorkingDir(builder);
final Process process = builder.start();
final List<String> output = readLinesFromInputStream(process.getInputStream());
final List<String> error = readLinesFromInputStream(process.getErrorStream());
final int status = process.waitFor();

@@ -124,6 +128,14 @@ class JvmOption {
}
}
@SuppressForbidden(reason = "ProcessBuilder takes File")
private static void setWorkingDir(ProcessBuilder builder) throws IOException {
// The real ES process uses the logs dir as the working directory. Since we don't
// have the logs dir yet, here we use a temp directory for calculating jvm options.
final Path tmpDir = Files.createTempDirectory("final-flags");
builder.directory(tmpDir.toFile());
}
private static List<String> readLinesFromInputStream(final InputStream is) throws IOException {
try (InputStreamReader isr = new InputStreamReader(is, StandardCharsets.UTF_8); BufferedReader br = new BufferedReader(isr)) {
return br.lines().toList();
@@ -269,13 +269,13 @@ public final class JvmOptionsParser {
* and the following JVM options will not be accepted:
* <ul>
* <li>
* {@code 18:-Xlog:age*=trace,gc*,safepoint:file=logs/gc.log:utctime,pid,tags:filecount=32,filesize=64m}
* {@code 18:-Xlog:age*=trace,gc*,safepoint:file=gc.log:utctime,pid,tags:filecount=32,filesize=64m}
* </li>
* <li>
* {@code 18-:-Xlog:age*=trace,gc*,safepoint:file=logs/gc.log:utctime,pid,tags:filecount=32,filesize=64m}
* {@code 18-:-Xlog:age*=trace,gc*,safepoint:file=gc.log:utctime,pid,tags:filecount=32,filesize=64m}
* </li>
* <li>
* {@code 18-19:-Xlog:age*=trace,gc*,safepoint:file=logs/gc.log:utctime,pid,tags:filecount=32,filesize=64m}
* {@code 18-19:-Xlog:age*=trace,gc*,safepoint:file=gc.log:utctime,pid,tags:filecount=32,filesize=64m}
* </li>
* </ul>
*
@@ -33,6 +33,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
/**

@@ -168,7 +169,7 @@ class ServerCli extends EnvironmentAwareCommand {
assert secureSettingsLoader(env) instanceof KeyStoreLoader;
String autoConfigLibs = "modules/x-pack-core,modules/x-pack-security,lib/tools/security-cli";
Command cmd = loadTool("auto-configure-node", autoConfigLibs);
Command cmd = loadTool(processInfo.sysprops(), "auto-configure-node", autoConfigLibs);
assert cmd instanceof EnvironmentAwareCommand;
@SuppressWarnings("raw")
var autoConfigNode = (EnvironmentAwareCommand) cmd;

@@ -210,7 +211,7 @@ class ServerCli extends EnvironmentAwareCommand {
// package private for testing
void syncPlugins(Terminal terminal, Environment env, ProcessInfo processInfo) throws Exception {
String pluginCliLibs = "lib/tools/plugin-cli";
Command cmd = loadTool("sync-plugins", pluginCliLibs);
Command cmd = loadTool(processInfo.sysprops(), "sync-plugins", pluginCliLibs);
assert cmd instanceof EnvironmentAwareCommand;
@SuppressWarnings("raw")
var syncPlugins = (EnvironmentAwareCommand) cmd;

@@ -258,8 +259,8 @@ class ServerCli extends EnvironmentAwareCommand {
}
// protected to allow tests to override
protected Command loadTool(String toolname, String libs) {
return CliToolProvider.load(toolname, libs).create();
protected Command loadTool(Map<String, String> sysprops, String toolname, String libs) {
return CliToolProvider.load(sysprops, toolname, libs).create();
}
// protected to allow tests to override

@@ -270,7 +271,8 @@ class ServerCli extends EnvironmentAwareCommand {
.withProcessInfo(processInfo)
.withServerArgs(args)
.withTempDir(tempDir)
.withJvmOptions(jvmOptions);
.withJvmOptions(jvmOptions)
.withWorkingDir(args.logsDir());
return serverProcessBuilder.start();
}
@@ -16,6 +16,7 @@ import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.core.PathUtils;
import org.elasticsearch.core.SuppressForbidden;
import java.io.IOException;
import java.io.OutputStream;

@@ -43,6 +44,7 @@ public class ServerProcessBuilder {
private ServerArgs serverArgs;
private ProcessInfo processInfo;
private List<String> jvmOptions;
private Path workingDir;
private Terminal terminal;
// this allows mocking the process building by tests

@@ -82,6 +84,11 @@ public class ServerProcessBuilder {
return this;
}
public ServerProcessBuilder withWorkingDir(Path workingDir) {
this.workingDir = workingDir;
return this;
}
/**
* Specifies the {@link Terminal} to use for reading input and writing output from/to the cli console
*/

@@ -155,7 +162,7 @@ public class ServerProcessBuilder {
boolean success = false;
try {
jvmProcess = createProcess(getCommand(), getJvmArgs(), jvmOptions, getEnvironment(), processStarter);
jvmProcess = createProcess(getCommand(), getJvmArgs(), jvmOptions, getEnvironment(), workingDir, processStarter);
errorPump = new ErrorPumpThread(terminal, jvmProcess.getErrorStream());
errorPump.start();
sendArgs(serverArgs, jvmProcess.getOutputStream());

@@ -185,16 +192,23 @@ public class ServerProcessBuilder {
List<String> jvmArgs,
List<String> jvmOptions,
Map<String, String> environment,
Path workingDir,
ProcessStarter processStarter
) throws InterruptedException, IOException {
var builder = new ProcessBuilder(Stream.concat(Stream.of(command), Stream.concat(jvmOptions.stream(), jvmArgs.stream())).toList());
builder.environment().putAll(environment);
setWorkingDir(builder, workingDir);
builder.redirectOutput(ProcessBuilder.Redirect.INHERIT);
return processStarter.start(builder);
}
@SuppressForbidden(reason = "ProcessBuilder takes File")
private static void setWorkingDir(ProcessBuilder builder, Path path) {
builder.directory(path.toFile());
}
private static void sendArgs(ServerArgs args, OutputStream processStdin) {
// DO NOT close the underlying process stdin, since we need to be able to write to it to signal exit
var out = new OutputStreamStreamOutput(processStdin);
@@ -24,6 +24,7 @@ import java.util.stream.Stream;
final class SystemJvmOptions {
static List<String> systemJvmOptions(Settings nodeSettings, final Map<String, String> sysprops) {
Path esHome = Path.of(sysprops.get("es.path.home"));
String distroType = sysprops.get("es.distribution.type");
String javaType = sysprops.get("es.java.type");
boolean isHotspot = sysprops.getOrDefault("sun.management.compiler", "").contains("HotSpot");

@@ -67,7 +68,8 @@ final class SystemJvmOptions {
"-Djava.locale.providers=CLDR",
// Enable vectorization for whatever version we are running. This ensures we use vectorization even when running EA builds.
"-Dorg.apache.lucene.vectorization.upperJavaFeatureVersion=" + Runtime.version().feature(),
// Pass through distribution type and java type
// Pass through some properties
"-Des.path.home=" + esHome,
"-Des.distribution.type=" + distroType,
"-Des.java.type=" + javaType
),

@@ -77,7 +79,7 @@ final class SystemJvmOptions {
maybeSetReplayFile(distroType, isHotspot),
maybeWorkaroundG1Bug(),
maybeAllowSecurityManager(useEntitlements),
maybeAttachEntitlementAgent(useEntitlements)
maybeAttachEntitlementAgent(esHome, useEntitlements)
).flatMap(s -> s).toList();
}

@@ -159,12 +161,12 @@ final class SystemJvmOptions {
return Stream.of();
}
private static Stream<String> maybeAttachEntitlementAgent(boolean useEntitlements) {
private static Stream<String> maybeAttachEntitlementAgent(Path esHome, boolean useEntitlements) {
if (useEntitlements == false) {
return Stream.empty();
}
Path dir = Path.of("lib", "entitlement-bridge");
Path dir = esHome.resolve("lib/entitlement-bridge");
if (Files.exists(dir) == false) {
throw new IllegalStateException("Directory for entitlement bridge jar does not exist: " + dir);
}
@@ -9,6 +9,7 @@
package org.elasticsearch.server.cli;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.core.IOUtils;

@@ -43,16 +44,18 @@ import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
@WithoutSecurityManager
@LuceneTestCase.SuppressFileSystems("*")
public class JvmOptionsParserTests extends ESTestCase {
private static final Map<String, String> TEST_SYSPROPS = Map.of("os.name", "Linux", "os.arch", "aarch64");
private static final Path ENTITLEMENTS_LIB_DIR = Path.of("lib", "entitlement-bridge");
private static Map<String, String> testSysprops;
@BeforeClass
public static void beforeClass() throws IOException {
Files.createDirectories(ENTITLEMENTS_LIB_DIR);
Files.createTempFile(ENTITLEMENTS_LIB_DIR, "mock-entitlements-bridge", ".jar");
Path homeDir = createTempDir();
Path entitlementLibDir = homeDir.resolve("lib/entitlement-bridge");
Files.createDirectories(entitlementLibDir);
Files.createTempFile(entitlementLibDir, "mock-entitlements-bridge", ".jar");
testSysprops = Map.of("os.name", "Linux", "os.arch", "aarch64", "es.path.home", homeDir.toString());
}
@AfterClass

@@ -369,30 +372,30 @@ public class JvmOptionsParserTests extends ESTestCase {
public void testNodeProcessorsActiveCount() {
{
final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(Settings.EMPTY, TEST_SYSPROPS);
final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(Settings.EMPTY, testSysprops);
assertThat(jvmOptions, not(hasItem(containsString("-XX:ActiveProcessorCount="))));
}
{
Settings nodeSettings = Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), 1).build();
final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(nodeSettings, TEST_SYSPROPS);
final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(nodeSettings, testSysprops);
assertThat(jvmOptions, hasItem("-XX:ActiveProcessorCount=1"));
}
{
// check rounding
Settings nodeSettings = Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), 0.2).build();
final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(nodeSettings, TEST_SYSPROPS);
final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(nodeSettings, testSysprops);
assertThat(jvmOptions, hasItem("-XX:ActiveProcessorCount=1"));
}
{
// check validation
Settings nodeSettings = Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), 10000).build();
var e = expectThrows(IllegalArgumentException.class, () -> SystemJvmOptions.systemJvmOptions(nodeSettings, TEST_SYSPROPS));
var e = expectThrows(IllegalArgumentException.class, () -> SystemJvmOptions.systemJvmOptions(nodeSettings, testSysprops));
assertThat(e.getMessage(), containsString("setting [node.processors] must be <="));
}
}
public void testCommandLineDistributionType() {
var sysprops = new HashMap<>(TEST_SYSPROPS);
var sysprops = new HashMap<>(testSysprops);
sysprops.put("es.distribution.type", "testdistro");
final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(Settings.EMPTY, sysprops);
assertThat(jvmOptions, hasItem("-Des.distribution.type=testdistro"));
@@ -36,6 +36,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;

@@ -558,7 +559,7 @@ public class ServerCliTests extends CommandTestCase {
boolean startServerCalled = false;
@Override
protected Command loadTool(String toolname, String libs) {
protected Command loadTool(Map<String, String> sysprops, String toolname, String libs) {
if (toolname.equals("auto-configure-node")) {
assertThat(libs, equalTo("modules/x-pack-core,modules/x-pack-security,lib/tools/security-cli"));
return AUTO_CONFIG_CLI;
@@ -57,7 +57,6 @@ import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
public class ServerProcessTests extends ESTestCase {

@@ -66,6 +65,7 @@ public class ServerProcessTests extends ESTestCase {
protected final Map<String, String> sysprops = new HashMap<>();
protected final Map<String, String> envVars = new HashMap<>();
Path esHomeDir;
Path workingDir;
Settings.Builder nodeSettings;
ProcessValidator processValidator;
MainMethod mainCallback;

@@ -88,12 +88,14 @@ public class ServerProcessTests extends ESTestCase {
@Before
public void resetEnv() {
esHomeDir = createTempDir();
terminal.reset();
sysprops.clear();
sysprops.put("os.name", "Linux");
sysprops.put("java.home", "javahome");
sysprops.put("es.path.home", esHomeDir.toString());
envVars.clear();
esHomeDir = createTempDir();
workingDir = createTempDir();
nodeSettings = Settings.builder();
processValidator = null;
mainCallback = null;

@@ -229,7 +231,8 @@ public class ServerProcessTests extends ESTestCase {
.withProcessInfo(pinfo)
.withServerArgs(createServerArgs(daemonize, quiet))
.withJvmOptions(List.of())
.withTempDir(ServerProcessUtils.setupTempDir(pinfo));
.withTempDir(ServerProcessUtils.setupTempDir(pinfo))
.withWorkingDir(workingDir);
return serverProcessBuilder.start(starter);
}

@@ -238,7 +241,7 @@ public class ServerProcessTests extends ESTestCase {
assertThat(pb.redirectInput(), equalTo(ProcessBuilder.Redirect.PIPE));
assertThat(pb.redirectOutput(), equalTo(ProcessBuilder.Redirect.INHERIT));
assertThat(pb.redirectError(), equalTo(ProcessBuilder.Redirect.PIPE));
assertThat(pb.directory(), nullValue()); // leave default, which is working directory
assertThat(String.valueOf(pb.directory()), equalTo(workingDir.toString())); // leave default, which is working directory
};
mainCallback = (args, stdin, stderr, exitCode) -> {
try (PrintStream err = new PrintStream(stderr, true, StandardCharsets.UTF_8)) {

@@ -312,7 +315,8 @@ public class ServerProcessTests extends ESTestCase {
.withProcessInfo(createProcessInfo())
.withServerArgs(createServerArgs(false, false))
.withJvmOptions(List.of("-Dfoo1=bar", "-Dfoo2=baz"))
.withTempDir(Path.of("."));
.withTempDir(Path.of("."))
.withWorkingDir(workingDir);
serverProcessBuilder.start(starter).waitFor();
}
docs/changelog/124966.yaml (new file)
@@ -0,0 +1,5 @@
pr: 124966
summary: Use logs dir as working directory
area: Infra/CLI
type: enhancement
issues: []
@@ -131,9 +131,9 @@ If you are running {{es}} as a Windows service, you can change the heap size usi
## JVM heap dump path setting [heap-dump-path-setting]
By default, {{es}} configures the JVM to dump the heap on out of memory exceptions to the default data directory. On [RPM](docs-content://deploy-manage/deploy/self-managed/install-elasticsearch-with-rpm.md) and [Debian](docs-content://deploy-manage/deploy/self-managed/install-elasticsearch-with-debian-package.md) packages, the data directory is `/var/lib/elasticsearch`. On [Linux and MacOS](docs-content://deploy-manage/deploy/self-managed/install-elasticsearch-from-archive-on-linux-macos.md) and [Windows](docs-content://deploy-manage/deploy/self-managed/install-elasticsearch-with-zip-on-windows.md) distributions, the `data` directory is located under the root of the {{es}} installation.
By default, {{es}} configures the JVM to dump the heap on out of memory exceptions to the default logs directory. On [RPM](docs-content://deploy-manage/deploy/self-managed/install-elasticsearch-with-rpm.md) and [Debian](docs-content://deploy-manage/deploy/self-managed/install-elasticsearch-with-debian-package.md) packages, the logs directory is `/var/log/elasticsearch`. On [Linux and MacOS](docs-content://deploy-manage/deploy/self-managed/install-elasticsearch-from-archive-on-linux-macos.md) and [Windows](docs-content://deploy-manage/deploy/self-managed/install-elasticsearch-with-zip-on-windows.md) distributions, the `logs` directory is located under the root of the {{es}} installation.
If this path is not suitable for receiving heap dumps, modify the `-XX:HeapDumpPath=...` entry in [`jvm.options`](#set-jvm-options):
If this path is not suitable for receiving heap dumps, add the `-XX:HeapDumpPath=...` entry in [`jvm.options`](#set-jvm-options):
* If you specify a directory, the JVM will generate a filename for the heap dump based on the PID of the running instance.
* If you specify a fixed filename instead of a directory, the file must not exist when the JVM needs to perform a heap dump on an out of memory exception. Otherwise, the heap dump will fail.
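As a concrete illustration of that setting, a writable heap dump location can be pinned in `jvm.options`; the directory below is only an example path:

```
# ensure heap dumps are taken on OOM and sent to a writable directory
-XX:+HeapDumpOnOutOfMemoryError
-XX:HeapDumpPath=/var/tmp/elasticsearch-heap-dumps
```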
@@ -19,6 +19,7 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@@ -43,14 +44,14 @@ public interface CliToolProvider {
/**
* Loads a tool provider from the Elasticsearch distribution.
*
* @param sysprops the system properties of the CLI process
* @param toolname the name of the tool to load
* @param libs the library directories to load, relative to the Elasticsearch homedir
* @return the instance of the loaded tool
* @throws AssertionError if the given toolname cannot be found or there are more than one tools found with the same name
*/
static CliToolProvider load(String toolname, String libs) {
// the ES homedir is always our working dir
Path homeDir = Paths.get("").toAbsolutePath();
static CliToolProvider load(Map<String, String> sysprops, String toolname, String libs) {
Path homeDir = Paths.get(sysprops.get("es.path.home")).toAbsolutePath();
final ClassLoader cliLoader;
if (libs.isBlank()) {
cliLoader = ClassLoader.getSystemClassLoader();
@@ -166,7 +166,8 @@ public class EntitlementBootstrap {
return propertyValue;
}
Path dir = Path.of("lib", "entitlement-agent");
Path esHome = Path.of(System.getProperty("es.path.home"));
Path dir = esHome.resolve("lib/entitlement-agent");
if (Files.exists(dir) == false) {
throw new IllegalStateException("Directory for entitlement jar does not exist: " + dir);
}
@@ -26,7 +26,8 @@ public class LoaderHelper {
return Paths.get(path);
}
Path platformDir = Paths.get("lib", "platform");
Path homeDir = Paths.get(System.getProperty("es.path.home"));
Path platformDir = homeDir.resolve("lib/platform");
String osname = System.getProperty("os.name");
String os;
@@ -441,12 +441,17 @@ public abstract class AbstractLocalClusterFactory<S extends LocalClusterSpec, H
// Patch jvm.options file to update paths
String content = Files.readString(jvmOptionsFile);
Map<String, String> expansions = getJvmOptionsReplacements();
for (String key : expansions.keySet()) {
Map<ReplacementKey, String> expansions = getJvmOptionsReplacements();
for (var entry : expansions.entrySet()) {
ReplacementKey replacement = entry.getKey();
String key = replacement.key();
if (content.contains(key) == false) {
throw new IOException("Template property '" + key + "' not found in template.");
key = replacement.fallback();
if (content.contains(key) == false) {
throw new IOException("Template property '" + replacement + "' not found in template.");
}
}
content = content.replace(key, expansions.get(key));
content = content.replace(key, entry.getValue());
}
Files.writeString(jvmOptionsFile, content);
} catch (IOException e) {

@@ -912,15 +917,43 @@ public abstract class AbstractLocalClusterFactory<S extends LocalClusterSpec, H
return environment;
}
private Map<String, String> getJvmOptionsReplacements() {
return Map.of(
"-XX:HeapDumpPath=data",
"-XX:HeapDumpPath=" + logsDir,
"logs/gc.log",
logsDir.resolve("gc.log").toString(),
"-XX:ErrorFile=logs/hs_err_pid%p.log",
"-XX:ErrorFile=" + logsDir.resolve("hs_err_pid%p.log")
);
private record ReplacementKey(String key, String fallback) {
ReplacementKey {
assert fallback == null || fallback.isEmpty() == false; // no empty fallback, which would match anything
}
}
private Map<ReplacementKey, String> getJvmOptionsReplacements() {
var expansions = new HashMap<ReplacementKey, String>();
var version = spec.getVersion();
ReplacementKey heapDumpPathSub;
if (version.before("8.19.0") && version.onOrAfter("6.3.0")) {
heapDumpPathSub = new ReplacementKey("-XX:HeapDumpPath=data", null);
} else {
// temporarily fall back to the old substitution so both old and new work during backport
heapDumpPathSub = new ReplacementKey("# -XX:HeapDumpPath=/heap/dump/path", "-XX:HeapDumpPath=data");
}
expansions.put(heapDumpPathSub, "-XX:HeapDumpPath=" + logsDir);
ReplacementKey gcLogSub;
if (version.before("8.19.0") && version.onOrAfter("6.2.0")) {
gcLogSub = new ReplacementKey("logs/gc.log", null);
} else {
// temporarily check the old substitution first so both old and new work during backport
gcLogSub = new ReplacementKey("logs/gc.log", "gc.log");
}
expansions.put(gcLogSub, logsDir.resolve("gc.log").toString());
ReplacementKey errorFileSub;
if (version.before("8.19.0") && version.getMajor() >= 7) {
errorFileSub = new ReplacementKey("-XX:ErrorFile=logs/hs_err_pid%p.log", null);
} else {
// temporarily check the old substitution first so both old and new work during backport
errorFileSub = new ReplacementKey("-XX:ErrorFile=logs/hs_err_pid%p.log", "-XX:ErrorFile=hs_err_pid%p.log");
}
expansions.put(errorFileSub, "-XX:ErrorFile=" + logsDir.resolve("hs_err_pid%p.log"));
return expansions;
}
private void runToolScript(String tool, String input, String... args) {
@@ -1590,7 +1590,7 @@ public class TimestampFormatFinderTests extends TextStructureTestCase {
[2018-06-27T11:59:22,202][INFO ][o.e.e.NodeEnvironment ] [node-0] heap size [494.9mb], compressed ordinary object pointers [true]
[2018-06-27T11:59:22,204][INFO ][o.e.n.Node ] [node-0] node name [node-0], node ID [Ha1gD8nNSDqjd6PIyu3DJA]
[2018-06-27T11:59:22,204][INFO ][o.e.n.Node ] [node-0] version[6.4.0-SNAPSHOT], pid[2785], build[default/zip/3c60efa/2018-06-26T14:55:15.206676Z], OS[Mac OS X/10.12.6/x86_64], JVM["Oracle Corporation"/Java HotSpot(TM) 64-Bit Server VM/10/10+46]
[2018-06-27T11:59:22,205][INFO ][o.e.n.Node ] [node-0] JVM arguments [-Xms1g, -Xmx1g, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -XX:-OmitStackTraceInFastThrow, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Djava.io.tmpdir=/var/folders/k5/5sqcdlps5sg3cvlp783gcz740000h0/T/elasticsearch.nFUyeMH1, -XX:+HeapDumpOnOutOfMemoryError, -XX:HeapDumpPath=data, -XX:ErrorFile=logs/hs_err_pid%p.log, -Xlog:gc*,gc+age=trace,safepoint:file=logs/gc.log:utctime,level,pid,tags:filecount=32,filesize=64m, -Djava.locale.providers=COMPAT, -Dio.netty.allocator.type=unpooled, -ea, -esa, -Xms512m, -Xmx512m, -Des.path.home=/Users/dave/elasticsearch/distribution/build/cluster/run node0/elasticsearch-6.4.0-SNAPSHOT, -Des.path.conf=/Users/dave/elasticsearch/distribution/build/cluster/run node0/elasticsearch-6.4.0-SNAPSHOT/config, -Des.distribution.flavor=default, -Des.distribution.type=zip]
[2018-06-27T11:59:22,205][INFO ][o.e.n.Node ] [node-0] JVM arguments [-Xms1g, -Xmx1g, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -XX:-OmitStackTraceInFastThrow, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Djava.io.tmpdir=/var/folders/k5/5sqcdlps5sg3cvlp783gcz740000h0/T/elasticsearch.nFUyeMH1, -XX:+HeapDumpOnOutOfMemoryError, -XX:ErrorFile=hs_err_pid%p.log, -Xlog:gc*,gc+age=trace,safepoint:file=gc.log:utctime,level,pid,tags:filecount=32,filesize=64m, -Djava.locale.providers=COMPAT, -Dio.netty.allocator.type=unpooled, -ea, -esa, -Xms512m, -Xmx512m, -Des.path.home=/Users/dave/elasticsearch/distribution/build/cluster/run node0/elasticsearch-6.4.0-SNAPSHOT, -Des.path.conf=/Users/dave/elasticsearch/distribution/build/cluster/run node0/elasticsearch-6.4.0-SNAPSHOT/config, -Des.distribution.flavor=default, -Des.distribution.type=zip]
[2018-06-27T11:59:22,205][WARN ][o.e.n.Node ] [node-0] version [6.4.0-SNAPSHOT] is a pre-release version of Elasticsearch and is not suitable for production
[2018-06-27T11:59:23,585][INFO ][o.e.p.PluginsService ] [node-0] loaded module [aggs-matrix-stats]
[2018-06-27T11:59:23,586][INFO ][o.e.p.PluginsService ] [node-0] loaded module [analysis-common]