Remove multiple paths from NodeEnvironment (#72599)

This commit collapses the arrays of data paths and locks internal to
NodeEnvironment into a single data path and a single node lock.

relates #71205
Ryan Ernst 2021-05-03 11:11:19 -07:00 committed by GitHub
parent fad5e44b99
commit b6436c51cd
2 changed files with 98 additions and 179 deletions
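The net effect, before the diff itself: the array-valued fields become scalars, and call sites stop indexing into single-element arrays. Below is a minimal, hedged sketch of the shape of the change. It uses a hypothetical stand-in class and a simplified NodePath record, not the real NodeEnvironment types, purely to illustrate the before/after at the field and call-site level.

    import java.nio.file.Path;

    // Hypothetical, simplified illustration of this commit's refactoring:
    // a single NodePath/lock field replaces the former single-element arrays.
    final class SingleDataPathExample {

        // Simplified stand-in for NodeEnvironment.NodePath (the real type carries more state).
        record NodePath(Path path) {
            Path resolve(String child) {
                return path.resolve(child);
            }
        }

        private final NodePath nodePath;                 // was: private final NodePath[] nodePaths;

        SingleDataPathExample(Path dataDir) {
            this.nodePath = new NodePath(dataDir);       // was: nodePaths[0] = new NodePath(dataDir);
        }

        Path nodeDataPath() {
            return nodePath.path();                      // was: return nodePaths[0].path;
        }
    }

The diff below applies the same simplification throughout NodeEnvironment, its NodeLock inner class, and the one caller in NodeRepurposeCommand.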

NodeEnvironment.java

@@ -64,7 +64,6 @@ import java.nio.file.StandardCopyOption;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -144,9 +143,9 @@ public final class NodeEnvironment implements Closeable {
     }

     private final Logger logger = LogManager.getLogger(NodeEnvironment.class);
-    private final NodePath[] nodePaths;
+    private final NodePath nodePath;
     private final Path sharedDataPath;
-    private final Lock[] locks;
+    private final Lock lock;

     private final AtomicBoolean closed = new AtomicBoolean(false);
     private final Map<ShardId, InternalShardLock> shardLocks = new HashMap<>();
@@ -177,8 +176,8 @@ public final class NodeEnvironment implements Closeable {
     public static class NodeLock implements Releasable {

-        private final Lock[] locks;
-        private final NodePath[] nodePaths;
+        private final Lock lock;
+        private final NodePath nodePath;

         public NodeLock(final Logger logger,
@@ -195,18 +194,18 @@ public final class NodeEnvironment implements Closeable {
                         final Environment environment,
                         final CheckedFunction<Path, Boolean, IOException> pathFunction,
                         final Function<Path, Path> subPathMapping) throws IOException {
-            nodePaths = new NodePath[1];
-            locks = new Lock[1];
             try {
                 Path dataDir = environment.dataFile();
                 Path dir = subPathMapping.apply(dataDir);
                 if (pathFunction.apply(dir) == false) {
+                    lock = null;
+                    nodePath = null;
                     return;
                 }
                 try (Directory luceneDir = FSDirectory.open(dir, NativeFSLockFactory.INSTANCE)) {
                     logger.trace("obtaining node lock on {} ...", dir.toAbsolutePath());
-                    locks[0] = luceneDir.obtainLock(NODE_LOCK_FILENAME);
-                    nodePaths[0] = new NodePath(dir);
+                    lock = luceneDir.obtainLock(NODE_LOCK_FILENAME);
+                    nodePath = new NodePath(dir);
                 } catch (IOException e) {
                     logger.trace(() -> new ParameterizedMessage(
                         "failed to obtain node lock on {}", dir.toAbsolutePath()), e);
@@ -221,17 +220,12 @@ public final class NodeEnvironment implements Closeable {
         }

         public NodePath getNodePath() {
-            return nodePaths[0];
+            return nodePath;
         }

         @Override
         public void close() {
-            for (int i = 0; i < locks.length; i++) {
-                if (locks[i] != null) {
-                    IOUtils.closeWhileHandlingException(locks[i]);
-                }
-                locks[i] = null;
-            }
+            IOUtils.closeWhileHandlingException(lock);
         }
     }
@@ -258,10 +252,10 @@ public final class NodeEnvironment implements Closeable {
             throw new IllegalStateException(message, e);
         }

-        this.locks = nodeLock.locks;
-        this.nodePaths = nodeLock.nodePaths;
+        this.lock = nodeLock.lock;
+        this.nodePath = nodeLock.nodePath;

-        logger.debug("using node location {}", Arrays.toString(nodePaths));
+        logger.debug("using node location {}", nodePath);

         maybeLogPathDetails();
         maybeLogHeapDetails();
@@ -269,7 +263,7 @@ public final class NodeEnvironment implements Closeable {
         applySegmentInfosTrace(settings);
         assertCanWrite();

-        ensureAtomicMoveSupported(nodePaths);
+        ensureAtomicMoveSupported(nodePath);

         if (upgradeLegacyNodeFolders(logger, settings, environment, nodeLock)) {
             assertCanWrite();
@@ -277,13 +271,13 @@ public final class NodeEnvironment implements Closeable {
         if (DiscoveryNode.canContainData(settings) == false) {
             if (DiscoveryNode.isMasterNode(settings) == false) {
-                ensureNoIndexMetadata(nodePaths);
+                ensureNoIndexMetadata(nodePath);
             }

-            ensureNoShardData(nodePaths);
+            ensureNoShardData(nodePath);
         }

-        this.nodeMetadata = loadNodeMetadata(settings, logger, nodePaths);
+        this.nodeMetadata = loadNodeMetadata(settings, logger, nodePath);

         success = true;
     } finally {
@@ -430,43 +424,28 @@ public final class NodeEnvironment implements Closeable {
         if (logger.isDebugEnabled()) {
             // Log one line per path.data:
             StringBuilder sb = new StringBuilder();
-            for (NodePath nodePath : nodePaths) {
-                sb.append('\n').append(" -> ").append(nodePath.path.toAbsolutePath());
+            sb.append('\n').append(" -> ").append(nodePath.path.toAbsolutePath());
             FsInfo.Path fsPath = FsProbe.getFSInfo(nodePath);
             sb.append(", free_space [")
                 .append(fsPath.getFree())
                 .append("], usable_space [")
                 .append(fsPath.getAvailable())
                 .append("], total_space [")
                 .append(fsPath.getTotal())
                 .append("], mount [")
                 .append(fsPath.getMount())
                 .append("], type [")
                 .append(fsPath.getType())
                 .append(']');
-            }
             logger.debug("node data locations details:{}", sb);
         } else if (logger.isInfoEnabled()) {
-            FsInfo.Path totFSPath = new FsInfo.Path();
-            Set<String> allTypes = new HashSet<>();
-            Set<String> allMounts = new HashSet<>();
-            for (NodePath nodePath : nodePaths) {
-                FsInfo.Path fsPath = FsProbe.getFSInfo(nodePath);
-                String mount = fsPath.getMount();
-                if (allMounts.contains(mount) == false) {
-                    allMounts.add(mount);
-                    String type = fsPath.getType();
-                    if (type != null) {
-                        allTypes.add(type);
-                    }
-                    totFSPath.add(fsPath);
-                }
-            }
+            Path path = nodePath.path.toAbsolutePath();
+            FsInfo.Path fsPath = FsProbe.getFSInfo(nodePath);
             // Just log a 1-line summary:
-            logger.info("using [{}] data paths, mounts [{}], net usable_space [{}], net total_space [{}], types [{}]",
-                nodePaths.length, allMounts, totFSPath.getAvailable(), totFSPath.getTotal(), toString(allTypes));
+            logger.info("using data path: mount [{}], usable_space [{}], total_space [{}], type [{}]",
+                fsPath.getMount(), fsPath.getAvailable(), fsPath.getTotal(), fsPath.getType());
         }
     }
@@ -481,29 +460,15 @@ public final class NodeEnvironment implements Closeable {
      * scans the node paths and loads existing metadata file. If not found a new meta data will be generated
      */
     private static NodeMetadata loadNodeMetadata(Settings settings, Logger logger,
-                                                 NodePath... nodePaths) throws IOException {
-        final Path[] paths = Arrays.stream(nodePaths).map(np -> np.path).toArray(Path[]::new);
-        NodeMetadata metadata = PersistedClusterStateService.nodeMetadata(paths);
+                                                 NodePath nodePath) throws IOException {
+        final Path path = nodePath.path;
+        NodeMetadata metadata = PersistedClusterStateService.nodeMetadata(path);
         if (metadata == null) {
             // load legacy metadata
-            final Set<String> nodeIds = new HashSet<>();
-            for (final Path path : paths) {
-                final NodeMetadata oldStyleMetadata = NodeMetadata.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, path);
-                if (oldStyleMetadata != null) {
-                    nodeIds.add(oldStyleMetadata.nodeId());
-                }
-            }
-            if (nodeIds.size() > 1) {
-                throw new IllegalStateException(
-                    "data paths " + Arrays.toString(paths) + " belong to multiple nodes with IDs " + nodeIds);
-            }
-            // load legacy metadata
-            final NodeMetadata legacyMetadata = NodeMetadata.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, paths);
+            final NodeMetadata legacyMetadata = NodeMetadata.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, path);
             if (legacyMetadata == null) {
-                assert nodeIds.isEmpty() : nodeIds;
                 metadata = new NodeMetadata(generateNodeId(settings), Version.CURRENT);
             } else {
-                assert nodeIds.equals(Collections.singleton(legacyMetadata.nodeId())) : nodeIds + " doesn't match " + legacyMetadata;
                 metadata = legacyMetadata;
             }
         }
@@ -885,7 +850,7 @@ public final class NodeEnvironment implements Closeable {
     }

     public boolean hasNodeFile() {
-        return nodePaths != null && locks != null;
+        return nodePath != null && lock != null;
     }

     /**
@@ -894,7 +859,7 @@ public final class NodeEnvironment implements Closeable {
      */
     public Path nodeDataPath() {
         assertEnvIsLocked();
-        return nodePaths[0].path;
+        return nodePath.path;
     }

     /**
@@ -920,10 +885,10 @@ public final class NodeEnvironment implements Closeable {
      */
     public NodePath nodePath() {
         assertEnvIsLocked();
-        if (nodePaths == null || locks == null) {
+        if (nodePath == null || lock == null) {
             throw new IllegalStateException("node is not configured to store local location");
         }
-        return nodePaths[0];
+        return nodePath;
     }

     /**
@@ -931,11 +896,9 @@ public final class NodeEnvironment implements Closeable {
      */
     public Path indexPath(Index index) {
         assertEnvIsLocked();
-        return nodePaths[0].resolve(index);
+        return nodePath.resolve(index);
     }

     /**
      * Returns all shard paths excluding custom shard path. Note: Shards are only allocated on one of the
      * returned paths. The returned array may contain paths to non-existing directories.
@@ -946,7 +909,7 @@ public final class NodeEnvironment implements Closeable {
      */
     public Path availableShardPath(ShardId shardId) {
         assertEnvIsLocked();
-        return nodePaths[0].resolve(shardId);
+        return nodePath.resolve(shardId);
     }

     /**
@@ -961,15 +924,11 @@ public final class NodeEnvironment implements Closeable {
      * @param excludeIndexPathIdsPredicate folder names to exclude
      */
     public Set<String> availableIndexFolders(Predicate<String> excludeIndexPathIdsPredicate) throws IOException {
-        if (nodePaths == null || locks == null) {
+        if (nodePath == null || lock == null) {
             throw new IllegalStateException("node is not configured to store local location");
         }
         assertEnvIsLocked();
-        Set<String> indexFolders = new HashSet<>();
-        for (NodePath nodePath : nodePaths) {
-            indexFolders.addAll(availableIndexFoldersForPath(nodePath, excludeIndexPathIdsPredicate));
-        }
-        return indexFolders;
+        return availableIndexFoldersForPath(nodePath, excludeIndexPathIdsPredicate);
     }
@@ -994,7 +953,7 @@ public final class NodeEnvironment implements Closeable {
      */
     public Set<String> availableIndexFoldersForPath(final NodePath nodePath, Predicate<String> excludeIndexPathIdsPredicate)
         throws IOException {
-        if (nodePaths == null || locks == null) {
+        if (nodePath == null || lock == null) {
             throw new IllegalStateException("node is not configured to store local location");
         }
         assertEnvIsLocked();
@@ -1017,11 +976,11 @@ public final class NodeEnvironment implements Closeable {
      * Resolves all existing paths to <code>indexFolderName</code> in ${data.paths}/indices
      */
     public Path resolveIndexFolder(String indexFolderName) {
-        if (nodePaths == null || locks == null) {
+        if (nodePath == null || lock == null) {
             throw new IllegalStateException("node is not configured to store local location");
         }
         assertEnvIsLocked();
-        return nodePaths[0].indicesPath.resolve(indexFolderName);
+        return nodePath.indicesPath.resolve(indexFolderName);
     }

     /**
@@ -1034,44 +993,12 @@ public final class NodeEnvironment implements Closeable {
      */
     public Set<ShardId> findAllShardIds(final Index index) throws IOException {
         assert index != null;
-        if (nodePaths == null || locks == null) {
+        if (nodePath == null || lock == null) {
             throw new IllegalStateException("node is not configured to store local location");
         }
         assertEnvIsLocked();
         final Set<ShardId> shardIds = new HashSet<>();
-        final String indexUniquePathId = index.getUUID();
-        for (final NodePath nodePath : nodePaths) {
-            shardIds.addAll(findAllShardsForIndex(nodePath.indicesPath.resolve(indexUniquePathId), index));
-        }
-        return shardIds;
-    }
-
-    /**
-     * Find all the shards for this index, returning a map of the {@code NodePath} to the number of shards on that path
-     * @param index the index by which to filter shards
-     * @return a map of NodePath to count of the shards for the index on that path
-     * @throws IOException if an IOException occurs
-     */
-    public Map<NodePath, Long> shardCountPerPath(final Index index) throws IOException {
-        assert index != null;
-        if (nodePaths == null || locks == null) {
-            throw new IllegalStateException("node is not configured to store local location");
-        }
-        assertEnvIsLocked();
-        final Map<NodePath, Long> shardCountPerPath = new HashMap<>();
-        final String indexUniquePathId = index.getUUID();
-        for (final NodePath nodePath : nodePaths) {
-            Path indexLocation = nodePath.indicesPath.resolve(indexUniquePathId);
-            if (Files.isDirectory(indexLocation)) {
-                shardCountPerPath.put(nodePath, (long) findAllShardsForIndex(indexLocation, index).size());
-            }
-        }
-        return shardCountPerPath;
-    }
-
-    private static Set<ShardId> findAllShardsForIndex(Path indexPath, Index index) throws IOException {
-        assert indexPath.getFileName().toString().equals(index.getUUID());
-        Set<ShardId> shardIds = new HashSet<>();
+        final Path indexPath = nodePath.indicesPath.resolve(index.getUUID());
         if (Files.isDirectory(indexPath)) {
             try (DirectoryStream<Path> stream = Files.newDirectoryStream(indexPath)) {
                 for (Path shardPath : stream) {
@@ -1089,28 +1016,24 @@ public final class NodeEnvironment implements Closeable {
     @Override
     public void close() {
-        if (closed.compareAndSet(false, true) && locks != null) {
-            for (Lock lock : locks) {
-                try {
-                    logger.trace("releasing lock [{}]", lock);
-                    lock.close();
-                } catch (IOException e) {
-                    logger.trace(() -> new ParameterizedMessage("failed to release lock [{}]", lock), e);
-                }
+        if (closed.compareAndSet(false, true) && lock != null) {
+            try {
+                logger.trace("releasing lock [{}]", lock);
+                lock.close();
+            } catch (IOException e) {
+                logger.trace(() -> new ParameterizedMessage("failed to release lock [{}]", lock), e);
             }
         }
     }

     private void assertEnvIsLocked() {
-        if (closed.get() == false && locks != null) {
-            for (Lock lock : locks) {
-                try {
-                    lock.ensureValid();
-                } catch (IOException e) {
-                    logger.warn("lock assertion failed", e);
-                    throw new IllegalStateException("environment is not locked", e);
-                }
+        if (closed.get() == false && lock != null) {
+            try {
+                lock.ensureValid();
+            } catch (IOException e) {
+                logger.warn("lock assertion failed", e);
+                throw new IllegalStateException("environment is not locked", e);
             }
         }
     }
@@ -1121,31 +1044,29 @@ public final class NodeEnvironment implements Closeable {
      * not supported by the filesystem. This test is executed on each of the data directories.
      * This method cleans up all files even in the case of an error.
      */
-    private static void ensureAtomicMoveSupported(final NodePath[] nodePaths) throws IOException {
-        for (NodePath nodePath : nodePaths) {
+    private static void ensureAtomicMoveSupported(final NodePath nodePath) throws IOException {
         assert Files.isDirectory(nodePath.path) : nodePath.path + " is not a directory";
         final Path src = nodePath.path.resolve(TEMP_FILE_NAME + ".tmp");
         final Path target = nodePath.path.resolve(TEMP_FILE_NAME + ".final");
         try {
             Files.deleteIfExists(src);
             Files.createFile(src);
             Files.move(src, target, StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING);
         } catch (AtomicMoveNotSupportedException ex) {
             throw new IllegalStateException("atomic_move is not supported by the filesystem on path ["
                 + nodePath.path
                 + "] atomic_move is required for elasticsearch to work correctly.", ex);
         } finally {
             try {
                 Files.deleteIfExists(src);
             } finally {
                 Files.deleteIfExists(target);
             }
         }
-        }
     }

-    private void ensureNoShardData(final NodePath[] nodePaths) throws IOException {
-        List<Path> shardDataPaths = collectShardDataPaths(nodePaths);
+    private void ensureNoShardData(final NodePath nodePath) throws IOException {
+        List<Path> shardDataPaths = collectShardDataPaths(nodePath);
         if (shardDataPaths.isEmpty() == false) {
             final String message = String.format(
                 Locale.ROOT,
@@ -1157,8 +1078,8 @@ public final class NodeEnvironment implements Closeable {
         }
     }

-    private void ensureNoIndexMetadata(final NodePath[] nodePaths) throws IOException {
-        List<Path> indexMetadataPaths = collectIndexMetadataPaths(nodePaths);
+    private void ensureNoIndexMetadata(final NodePath nodePath) throws IOException {
+        List<Path> indexMetadataPaths = collectIndexMetadataPaths(nodePath);
         if (indexMetadataPaths.isEmpty() == false) {
             final String message = String.format(
                 Locale.ROOT,
@@ -1174,8 +1095,8 @@ public final class NodeEnvironment implements Closeable {
     /**
      * Collect the paths containing shard data in the indicated node paths. The returned paths will point to the shard data folder.
      */
-    static List<Path> collectShardDataPaths(NodePath[] nodePaths) throws IOException {
-        return collectIndexSubPaths(nodePaths, NodeEnvironment::isShardPath);
+    static List<Path> collectShardDataPaths(NodePath nodePath) throws IOException {
+        return collectIndexSubPaths(nodePath, NodeEnvironment::isShardPath);
     }
@@ -1183,23 +1104,21 @@ public final class NodeEnvironment implements Closeable {
      * Collect the paths containing index meta data in the indicated node paths. The returned paths will point to the
      * {@link MetadataStateFormat#STATE_DIR_NAME} folder
      */
-    static List<Path> collectIndexMetadataPaths(NodePath[] nodePaths) throws IOException {
-        return collectIndexSubPaths(nodePaths, NodeEnvironment::isIndexMetadataPath);
+    static List<Path> collectIndexMetadataPaths(NodePath nodePath) throws IOException {
+        return collectIndexSubPaths(nodePath, NodeEnvironment::isIndexMetadataPath);
     }

-    private static List<Path> collectIndexSubPaths(NodePath[] nodePaths, Predicate<Path> subPathPredicate) throws IOException {
+    private static List<Path> collectIndexSubPaths(NodePath nodePath, Predicate<Path> subPathPredicate) throws IOException {
         List<Path> indexSubPaths = new ArrayList<>();
-        for (NodePath nodePath : nodePaths) {
-            Path indicesPath = nodePath.indicesPath;
+        Path indicesPath = nodePath.indicesPath;
         if (Files.isDirectory(indicesPath)) {
             try (DirectoryStream<Path> indexStream = Files.newDirectoryStream(indicesPath)) {
                 for (Path indexPath : indexStream) {
                     if (Files.isDirectory(indexPath)) {
                         try (Stream<Path> shardStream = Files.list(indexPath)) {
                             shardStream.filter(subPathPredicate)
                                 .map(Path::toAbsolutePath)
                                 .forEach(indexSubPaths::add);
                         }
                     }
                 }

NodeRepurposeCommand.java

@@ -77,10 +77,10 @@ public class NodeRepurposeCommand extends ElasticsearchNodeCommand {
         NodeEnvironment.NodePath[] nodePaths = toNodePaths(dataPaths);

         terminal.println(Terminal.Verbosity.VERBOSE, "Collecting shard data paths");
-        List<Path> shardDataPaths = NodeEnvironment.collectShardDataPaths(nodePaths);
+        List<Path> shardDataPaths = NodeEnvironment.collectShardDataPaths(nodePaths[0]);
         terminal.println(Terminal.Verbosity.VERBOSE, "Collecting index metadata paths");
-        List<Path> indexMetadataPaths = NodeEnvironment.collectIndexMetadataPaths(nodePaths);
+        List<Path> indexMetadataPaths = NodeEnvironment.collectIndexMetadataPaths(nodePaths[0]);
         Set<Path> indexPaths = uniqueParentPaths(shardDataPaths, indexMetadataPaths);
@@ -116,7 +116,7 @@ public class NodeRepurposeCommand extends ElasticsearchNodeCommand {
         NodeEnvironment.NodePath[] nodePaths = toNodePaths(dataPaths);
         terminal.println(Terminal.Verbosity.VERBOSE, "Collecting shard data paths");
-        List<Path> shardDataPaths = NodeEnvironment.collectShardDataPaths(nodePaths);
+        List<Path> shardDataPaths = NodeEnvironment.collectShardDataPaths(nodePaths[0]);
         if (shardDataPaths.isEmpty()) {
             terminal.println(NO_SHARD_DATA_TO_CLEAN_UP_FOUND);
             return;