Add the ability to reload search analyzers during shard recovery (#97421)

This change adds the ability for reloadable search analyzers to adapt their loading based on
the index creation context. It is useful for reloadable search analyzers that need to load
expensive resources from indices or disk. In such cases they can defer loading the
resource until shard recovery, avoiding blocking a master or create-index thread.
---------

Co-authored-by: Mayya Sharipova <mayya.sharipova@elastic.co>
This commit is contained in:
Jim Ferenczi 2023-07-07 17:19:47 +01:00 committed by GitHub
parent c508f6172e
commit bccf4eeed2
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
36 changed files with 225 additions and 95 deletions

View file

@ -154,7 +154,6 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
private final SetOnce<ScriptService> scriptServiceHolder = new SetOnce<>(); private final SetOnce<ScriptService> scriptServiceHolder = new SetOnce<>();
private final SetOnce<SynonymsManagementAPIService> synonymsManagementServiceHolder = new SetOnce<>(); private final SetOnce<SynonymsManagementAPIService> synonymsManagementServiceHolder = new SetOnce<>();
private final SetOnce<ThreadPool> threadPoolHolder = new SetOnce<>();
@Override @Override
public Collection<Object> createComponents( public Collection<Object> createComponents(
@ -175,7 +174,6 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
) { ) {
this.scriptServiceHolder.set(scriptService); this.scriptServiceHolder.set(scriptService);
this.synonymsManagementServiceHolder.set(new SynonymsManagementAPIService(client)); this.synonymsManagementServiceHolder.set(new SynonymsManagementAPIService(client));
this.threadPoolHolder.set(threadPool);
return Collections.emptyList(); return Collections.emptyList();
} }
@ -341,22 +339,11 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
filters.put("stemmer", StemmerTokenFilterFactory::new); filters.put("stemmer", StemmerTokenFilterFactory::new);
filters.put( filters.put(
"synonym", "synonym",
requiresAnalysisSettings( requiresAnalysisSettings((i, e, n, s) -> new SynonymTokenFilterFactory(i, e, n, s, synonymsManagementServiceHolder.get()))
(i, e, n, s) -> new SynonymTokenFilterFactory(i, e, n, s, synonymsManagementServiceHolder.get(), threadPoolHolder.get())
)
); );
filters.put( filters.put(
"synonym_graph", "synonym_graph",
requiresAnalysisSettings( requiresAnalysisSettings((i, e, n, s) -> new SynonymGraphTokenFilterFactory(i, e, n, s, synonymsManagementServiceHolder.get()))
(i, e, n, s) -> new SynonymGraphTokenFilterFactory(
i,
e,
n,
s,
synonymsManagementServiceHolder.get(),
threadPoolHolder.get()
)
)
); );
filters.put("trim", TrimTokenFilterFactory::new); filters.put("trim", TrimTokenFilterFactory::new);
filters.put("truncate", requiresAnalysisSettings(TruncateTokenFilterFactory::new)); filters.put("truncate", requiresAnalysisSettings(TruncateTokenFilterFactory::new));

View file

@ -16,6 +16,7 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
import org.elasticsearch.index.analysis.AnalysisMode; import org.elasticsearch.index.analysis.AnalysisMode;
@ -51,6 +52,7 @@ public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory {
@Override @Override
public TokenFilterFactory getChainAwareTokenFilterFactory( public TokenFilterFactory getChainAwareTokenFilterFactory(
IndexCreationContext context,
TokenizerFactory tokenizer, TokenizerFactory tokenizer,
List<CharFilterFactory> charFilters, List<CharFilterFactory> charFilters,
List<TokenFilterFactory> previousTokenFilters, List<TokenFilterFactory> previousTokenFilters,
@ -66,7 +68,7 @@ public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory {
String[] parts = Strings.tokenizeToStringArray(filter, ","); String[] parts = Strings.tokenizeToStringArray(filter, ",");
if (parts.length == 1) { if (parts.length == 1) {
TokenFilterFactory factory = resolveFilterFactory(allFilters, parts[0]); TokenFilterFactory factory = resolveFilterFactory(allFilters, parts[0]);
factory = factory.getChainAwareTokenFilterFactory(tokenizer, charFilters, previousTokenFilters, allFilters); factory = factory.getChainAwareTokenFilterFactory(context, tokenizer, charFilters, previousTokenFilters, allFilters);
filters.add(factory); filters.add(factory);
mode = mode.merge(factory.getAnalysisMode()); mode = mode.merge(factory.getAnalysisMode());
} else { } else {
@ -74,7 +76,7 @@ public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory {
List<TokenFilterFactory> chain = new ArrayList<>(); List<TokenFilterFactory> chain = new ArrayList<>();
for (String subfilter : parts) { for (String subfilter : parts) {
TokenFilterFactory factory = resolveFilterFactory(allFilters, subfilter); TokenFilterFactory factory = resolveFilterFactory(allFilters, subfilter);
factory = factory.getChainAwareTokenFilterFactory(tokenizer, charFilters, existingChain, allFilters); factory = factory.getChainAwareTokenFilterFactory(context, tokenizer, charFilters, existingChain, allFilters);
chain.add(factory); chain.add(factory);
existingChain.add(factory); existingChain.add(factory);
mode = mode.merge(factory.getAnalysisMode()); mode = mode.merge(factory.getAnalysisMode());

View file

@ -11,6 +11,7 @@ package org.elasticsearch.analysis.common;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter; import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.CharFilterFactory;
@ -57,6 +58,7 @@ public class ScriptedConditionTokenFilterFactory extends AbstractTokenFilterFact
@Override @Override
public TokenFilterFactory getChainAwareTokenFilterFactory( public TokenFilterFactory getChainAwareTokenFilterFactory(
IndexCreationContext context,
TokenizerFactory tokenizer, TokenizerFactory tokenizer,
List<CharFilterFactory> charFilters, List<CharFilterFactory> charFilters,
List<TokenFilterFactory> previousTokenFilters, List<TokenFilterFactory> previousTokenFilters,
@ -71,7 +73,7 @@ public class ScriptedConditionTokenFilterFactory extends AbstractTokenFilterFact
"ScriptedConditionTokenFilter [" + name() + "] refers to undefined token filter [" + filter + "]" "ScriptedConditionTokenFilter [" + name() + "] refers to undefined token filter [" + filter + "]"
); );
} }
tff = tff.getChainAwareTokenFilterFactory(tokenizer, charFilters, existingChain, allFilters); tff = tff.getChainAwareTokenFilterFactory(context, tokenizer, charFilters, existingChain, allFilters);
filters.add(tff); filters.add(tff);
existingChain.add(tff); existingChain.add(tff);
} }

View file

@ -14,13 +14,13 @@ import org.apache.lucene.analysis.synonym.SynonymGraphFilter;
import org.apache.lucene.analysis.synonym.SynonymMap; import org.apache.lucene.analysis.synonym.SynonymMap;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisMode; import org.elasticsearch.index.analysis.AnalysisMode;
import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.synonyms.SynonymsManagementAPIService; import org.elasticsearch.synonyms.SynonymsManagementAPIService;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.List; import java.util.List;
import java.util.function.Function; import java.util.function.Function;
@ -32,10 +32,9 @@ public class SynonymGraphTokenFilterFactory extends SynonymTokenFilterFactory {
Environment env, Environment env,
String name, String name,
Settings settings, Settings settings,
SynonymsManagementAPIService synonymsManagementAPIService, SynonymsManagementAPIService synonymsManagementAPIService
ThreadPool threadPool
) { ) {
super(indexSettings, env, name, settings, synonymsManagementAPIService, threadPool); super(indexSettings, env, name, settings, synonymsManagementAPIService);
} }
@Override @Override
@ -45,13 +44,14 @@ public class SynonymGraphTokenFilterFactory extends SynonymTokenFilterFactory {
@Override @Override
public TokenFilterFactory getChainAwareTokenFilterFactory( public TokenFilterFactory getChainAwareTokenFilterFactory(
IndexCreationContext context,
TokenizerFactory tokenizer, TokenizerFactory tokenizer,
List<CharFilterFactory> charFilters, List<CharFilterFactory> charFilters,
List<TokenFilterFactory> previousTokenFilters, List<TokenFilterFactory> previousTokenFilters,
Function<String, TokenFilterFactory> allFilters Function<String, TokenFilterFactory> allFilters
) { ) {
final Analyzer analyzer = buildSynonymAnalyzer(tokenizer, charFilters, previousTokenFilters); final Analyzer analyzer = buildSynonymAnalyzer(tokenizer, charFilters, previousTokenFilters);
ReaderWithOrigin rulesFromSettings = getRulesFromSettings(environment); ReaderWithOrigin rulesFromSettings = getRulesFromSettings(environment, context);
final SynonymMap synonyms = buildSynonyms(analyzer, rulesFromSettings); final SynonymMap synonyms = buildSynonyms(analyzer, rulesFromSettings);
final String name = name(); final String name = name();
return new TokenFilterFactory() { return new TokenFilterFactory() {

View file

@ -12,11 +12,11 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.synonym.SynonymFilter; import org.apache.lucene.analysis.synonym.SynonymFilter;
import org.apache.lucene.analysis.synonym.SynonymMap; import org.apache.lucene.analysis.synonym.SynonymMap;
import org.elasticsearch.cluster.service.MasterService;
import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
import org.elasticsearch.index.analysis.Analysis; import org.elasticsearch.index.analysis.Analysis;
@ -27,7 +27,6 @@ import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.synonyms.SynonymsAPI; import org.elasticsearch.synonyms.SynonymsAPI;
import org.elasticsearch.synonyms.SynonymsManagementAPIService; import org.elasticsearch.synonyms.SynonymsManagementAPIService;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.Reader; import java.io.Reader;
import java.io.StringReader; import java.io.StringReader;
@ -45,15 +44,13 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
protected final Environment environment; protected final Environment environment;
protected final AnalysisMode analysisMode; protected final AnalysisMode analysisMode;
private final SynonymsManagementAPIService synonymsManagementAPIService; private final SynonymsManagementAPIService synonymsManagementAPIService;
private final ThreadPool threadPool;
SynonymTokenFilterFactory( SynonymTokenFilterFactory(
IndexSettings indexSettings, IndexSettings indexSettings,
Environment env, Environment env,
String name, String name,
Settings settings, Settings settings,
SynonymsManagementAPIService synonymsManagementAPIService, SynonymsManagementAPIService synonymsManagementAPIService
ThreadPool threadPool
) { ) {
super(name, settings); super(name, settings);
this.settings = settings; this.settings = settings;
@ -73,7 +70,6 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
this.analysisMode = updateable ? AnalysisMode.SEARCH_TIME : AnalysisMode.ALL; this.analysisMode = updateable ? AnalysisMode.SEARCH_TIME : AnalysisMode.ALL;
this.environment = env; this.environment = env;
this.synonymsManagementAPIService = synonymsManagementAPIService; this.synonymsManagementAPIService = synonymsManagementAPIService;
this.threadPool = threadPool;
} }
@Override @Override
@ -88,13 +84,14 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
@Override @Override
public TokenFilterFactory getChainAwareTokenFilterFactory( public TokenFilterFactory getChainAwareTokenFilterFactory(
IndexCreationContext context,
TokenizerFactory tokenizer, TokenizerFactory tokenizer,
List<CharFilterFactory> charFilters, List<CharFilterFactory> charFilters,
List<TokenFilterFactory> previousTokenFilters, List<TokenFilterFactory> previousTokenFilters,
Function<String, TokenFilterFactory> allFilters Function<String, TokenFilterFactory> allFilters
) { ) {
final Analyzer analyzer = buildSynonymAnalyzer(tokenizer, charFilters, previousTokenFilters); final Analyzer analyzer = buildSynonymAnalyzer(tokenizer, charFilters, previousTokenFilters);
ReaderWithOrigin rulesFromSettings = getRulesFromSettings(environment); ReaderWithOrigin rulesFromSettings = getRulesFromSettings(environment, context);
final SynonymMap synonyms = buildSynonyms(analyzer, rulesFromSettings); final SynonymMap synonyms = buildSynonyms(analyzer, rulesFromSettings);
final String name = name(); final String name = name();
return new TokenFilterFactory() { return new TokenFilterFactory() {
@ -156,7 +153,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
} }
} }
protected ReaderWithOrigin getRulesFromSettings(Environment env) { protected ReaderWithOrigin getRulesFromSettings(Environment env, IndexCreationContext context) {
if (settings.getAsList("synonyms", null) != null) { if (settings.getAsList("synonyms", null) != null) {
List<String> rulesList = Analysis.getWordList(env, settings, "synonyms"); List<String> rulesList = Analysis.getWordList(env, settings, "synonyms");
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
@ -171,15 +168,17 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
); );
} }
String synonymsSet = settings.get("synonyms_set", null); String synonymsSet = settings.get("synonyms_set", null);
// provide fake synonyms on master thread, as on Master an analyzer is built for validation only // provide fake synonyms on index creation and index metadata checks to ensure that we
if (MasterService.isMasterUpdateThread()) { // don't block a master thread
if (context != IndexCreationContext.RELOAD_ANALYZERS) {
return new ReaderWithOrigin( return new ReaderWithOrigin(
new StringReader("fake rule => fake"), new StringReader("fake rule => fake"),
"fake [" + synonymsSet + "] synonyms_set in .synonyms index" "fake [" + synonymsSet + "] synonyms_set in .synonyms index",
synonymsSet
); );
} }
return new ReaderWithOrigin( return new ReaderWithOrigin(
Analysis.getReaderFromIndex(synonymsSet, threadPool, synonymsManagementAPIService), Analysis.getReaderFromIndex(synonymsSet, synonymsManagementAPIService),
"[" + synonymsSet + "] synonyms_set in .synonyms index", "[" + synonymsSet + "] synonyms_set in .synonyms index",
synonymsSet synonymsSet
); );

View file

@ -16,6 +16,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.MyFilterTokenFilterFactory; import org.elasticsearch.index.analysis.MyFilterTokenFilterFactory;
@ -64,7 +65,7 @@ public class CompoundAnalysisTests extends ESTestCase {
private List<String> analyze(Settings settings, String analyzerName, String text) throws IOException { private List<String> analyze(Settings settings, String analyzerName, String text) throws IOException {
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
AnalysisModule analysisModule = createAnalysisModule(settings); AnalysisModule analysisModule = createAnalysisModule(settings);
IndexAnalyzers indexAnalyzers = analysisModule.getAnalysisRegistry().build(idxSettings); IndexAnalyzers indexAnalyzers = analysisModule.getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings);
Analyzer analyzer = indexAnalyzers.get(analyzerName).analyzer(); Analyzer analyzer = indexAnalyzers.get(analyzerName).analyzer();
TokenStream stream = analyzer.tokenStream("", text); TokenStream stream = analyzer.tokenStream("", text);

View file

@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NamedAnalyzer;
@ -41,7 +42,7 @@ public class EdgeNGramTokenizerTests extends ESTokenStreamTestCase {
TestEnvironment.newEnvironment(settings), TestEnvironment.newEnvironment(settings),
Collections.singletonList(new CommonAnalysisPlugin()), Collections.singletonList(new CommonAnalysisPlugin()),
new StablePluginsRegistry() new StablePluginsRegistry()
).getAnalysisRegistry().build(idxSettings); ).getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings);
} }
public void testPreConfiguredTokenizer() throws IOException { public void testPreConfiguredTokenizer() throws IOException {

View file

@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NamedAnalyzer;
@ -44,7 +45,7 @@ public class MultiplexerTokenFilterTests extends ESTokenStreamTestCase {
TestEnvironment.newEnvironment(settings), TestEnvironment.newEnvironment(settings),
Collections.singletonList(new CommonAnalysisPlugin()), Collections.singletonList(new CommonAnalysisPlugin()),
new StablePluginsRegistry() new StablePluginsRegistry()
).getAnalysisRegistry().build(idxSettings); ).getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings);
try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) { try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) {
assertNotNull(analyzer); assertNotNull(analyzer);
@ -79,7 +80,7 @@ public class MultiplexerTokenFilterTests extends ESTokenStreamTestCase {
TestEnvironment.newEnvironment(settings), TestEnvironment.newEnvironment(settings),
Collections.singletonList(new CommonAnalysisPlugin()), Collections.singletonList(new CommonAnalysisPlugin()),
new StablePluginsRegistry() new StablePluginsRegistry()
).getAnalysisRegistry().build(idxSettings); ).getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings);
try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) { try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) {
assertNotNull(analyzer); assertNotNull(analyzer);

View file

@ -19,6 +19,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NamedAnalyzer;
@ -74,7 +75,7 @@ public class PredicateTokenScriptFilterTests extends ESTokenStreamTestCase {
new StablePluginsRegistry() new StablePluginsRegistry()
); );
IndexAnalyzers analyzers = module.getAnalysisRegistry().build(idxSettings); IndexAnalyzers analyzers = module.getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings);
try (NamedAnalyzer analyzer = analyzers.get("myAnalyzer")) { try (NamedAnalyzer analyzer = analyzers.get("myAnalyzer")) {
assertNotNull(analyzer); assertNotNull(analyzer);

View file

@ -19,6 +19,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NamedAnalyzer;
@ -74,7 +75,7 @@ public class ScriptedConditionTokenFilterTests extends ESTokenStreamTestCase {
new StablePluginsRegistry() new StablePluginsRegistry()
); );
IndexAnalyzers analyzers = module.getAnalysisRegistry().build(idxSettings); IndexAnalyzers analyzers = module.getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings);
try (NamedAnalyzer analyzer = analyzers.get("myAnalyzer")) { try (NamedAnalyzer analyzer = analyzers.get("myAnalyzer")) {
assertNotNull(analyzer); assertNotNull(analyzer);

View file

@ -268,7 +268,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
for (String factory : bypassingFactories) { for (String factory : bypassingFactories) {
TokenFilterFactory tff = plugin.getTokenFilters().get(factory).get(idxSettings, null, factory, settings); TokenFilterFactory tff = plugin.getTokenFilters().get(factory).get(idxSettings, null, factory, settings);
TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings); TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings);
SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings, null, null); SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings, null);
Analyzer analyzer = SynonymTokenFilterFactory.buildSynonymAnalyzer( Analyzer analyzer = SynonymTokenFilterFactory.buildSynonymAnalyzer(
tok, tok,
Collections.emptyList(), Collections.emptyList(),
@ -338,7 +338,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
for (String factory : disallowedFactories) { for (String factory : disallowedFactories) {
TokenFilterFactory tff = plugin.getTokenFilters().get(factory).get(idxSettings, null, factory, settings); TokenFilterFactory tff = plugin.getTokenFilters().get(factory).get(idxSettings, null, factory, settings);
TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings); TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings);
SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings, null, null); SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings, null);
IllegalArgumentException e = expectThrows( IllegalArgumentException e = expectThrows(
IllegalArgumentException.class, IllegalArgumentException.class,

View file

@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisTestsHelper; import org.elasticsearch.index.analysis.AnalysisTestsHelper;
import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.IndexAnalyzers;
@ -196,7 +197,7 @@ public class WordDelimiterGraphTokenFilterFactoryTests extends BaseWordDelimiter
TestEnvironment.newEnvironment(settings), TestEnvironment.newEnvironment(settings),
Collections.singletonList(new CommonAnalysisPlugin()), Collections.singletonList(new CommonAnalysisPlugin()),
new StablePluginsRegistry() new StablePluginsRegistry()
).getAnalysisRegistry().build(idxSettings) ).getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings)
) { ) {
NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
@ -221,7 +222,7 @@ public class WordDelimiterGraphTokenFilterFactoryTests extends BaseWordDelimiter
TestEnvironment.newEnvironment(settings), TestEnvironment.newEnvironment(settings),
Collections.singletonList(new CommonAnalysisPlugin()), Collections.singletonList(new CommonAnalysisPlugin()),
new StablePluginsRegistry() new StablePluginsRegistry()
).getAnalysisRegistry().build(idxSettings) ).getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings)
) { ) {
NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");

View file

@ -141,6 +141,25 @@ setup:
query: bye query: bye
- match: { hits.total.value: 1 } - match: { hits.total.value: 1 }
- do:
indices.close:
index: my_index
- do:
indices.open:
index: my_index
# Confirm that the index analyzers are reloaded
- do:
search:
index: my_index
body:
query:
match:
my_field:
query: hola
- match: { hits.total.value: 1 }
--- ---
"Delete the synonyms set and confirm failed reload analyzers details": "Delete the synonyms set and confirm failed reload analyzers details":
- do: - do:

View file

@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.IOUtils;
import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalyzerComponents; import org.elasticsearch.index.analysis.AnalyzerComponents;
@ -212,6 +213,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeAc
) throws IOException { ) throws IOException {
if (request.tokenizer() != null) { if (request.tokenizer() != null) {
return analysisRegistry.buildCustomAnalyzer( return analysisRegistry.buildCustomAnalyzer(
IndexCreationContext.RELOAD_ANALYZERS,
indexSettings, indexSettings,
false, false,
request.tokenizer(), request.tokenizer(),
@ -221,6 +223,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeAc
} else if (((request.tokenFilters() != null && request.tokenFilters().size() > 0) } else if (((request.tokenFilters() != null && request.tokenFilters().size() > 0)
|| (request.charFilters() != null && request.charFilters().size() > 0))) { || (request.charFilters() != null && request.charFilters().size() > 0))) {
return analysisRegistry.buildCustomAnalyzer( return analysisRegistry.buildCustomAnalyzer(
IndexCreationContext.RELOAD_ANALYZERS,
indexSettings, indexSettings,
true, true,
new NameOrDefinition("keyword"), new NameOrDefinition("keyword"),

View file

@ -35,6 +35,7 @@ import org.elasticsearch.core.CheckedFunction;
import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.IOUtils;
import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Nullable;
import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.cache.query.DisabledQueryCache; import org.elasticsearch.index.cache.query.DisabledQueryCache;
@ -456,7 +457,7 @@ public final class IndexModule {
} }
public IndexService newIndexService( public IndexService newIndexService(
IndexService.IndexCreationContext indexCreationContext, IndexCreationContext indexCreationContext,
NodeEnvironment environment, NodeEnvironment environment,
XContentParserConfiguration parserConfiguration, XContentParserConfiguration parserConfiguration,
IndexService.ShardStoreDeleter shardStoreDeleter, IndexService.ShardStoreDeleter shardStoreDeleter,
@ -501,7 +502,7 @@ public final class IndexModule {
queryCache = DisabledQueryCache.INSTANCE; queryCache = DisabledQueryCache.INSTANCE;
} }
if (IndexService.needsMapperService(indexSettings, indexCreationContext)) { if (IndexService.needsMapperService(indexSettings, indexCreationContext)) {
indexAnalyzers = analysisRegistry.build(indexSettings); indexAnalyzers = analysisRegistry.build(indexCreationContext, indexSettings);
} }
final IndexService indexService = new IndexService( final IndexService indexService = new IndexService(
indexSettings, indexSettings,
@ -636,7 +637,7 @@ public final class IndexModule {
return new MapperService( return new MapperService(
clusterService, clusterService,
indexSettings, indexSettings,
analysisRegistry.build(indexSettings), analysisRegistry.build(IndexCreationContext.METADATA_VERIFICATION, indexSettings),
parserConfiguration, parserConfiguration,
new SimilarityService(indexSettings, scriptService, similarities), new SimilarityService(indexSettings, scriptService, similarities),
mapperRegistry, mapperRegistry,

View file

@ -190,6 +190,8 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
Engine.IndexCommitListener indexCommitListener Engine.IndexCommitListener indexCommitListener
) { ) {
super(indexSettings); super(indexSettings);
assert indexCreationContext != IndexCreationContext.RELOAD_ANALYZERS
: "IndexCreationContext.RELOAD_ANALYZERS should only be used when reloading analysers";
this.allowExpensiveQueries = allowExpensiveQueries; this.allowExpensiveQueries = allowExpensiveQueries;
this.indexSettings = indexSettings; this.indexSettings = indexSettings;
this.parserConfiguration = parserConfiguration; this.parserConfiguration = parserConfiguration;
@ -274,7 +276,8 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
public enum IndexCreationContext { public enum IndexCreationContext {
CREATE_INDEX, CREATE_INDEX,
METADATA_VERIFICATION METADATA_VERIFICATION,
RELOAD_ANALYZERS
} }
public int numberOfShards() { public int numberOfShards() {

View file

@ -52,7 +52,6 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.synonyms.PagedResult; import org.elasticsearch.synonyms.PagedResult;
import org.elasticsearch.synonyms.SynonymRule; import org.elasticsearch.synonyms.SynonymRule;
import org.elasticsearch.synonyms.SynonymsManagementAPIService; import org.elasticsearch.synonyms.SynonymsManagementAPIService;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.IOException; import java.io.IOException;
@ -298,22 +297,9 @@ public class Analysis {
} }
} }
public static Reader getReaderFromIndex( public static Reader getReaderFromIndex(String synonymsSet, SynonymsManagementAPIService synonymsManagementAPIService) {
String synonymsSet, final PlainActionFuture<PagedResult<SynonymRule>> synonymsLoadingFuture = new PlainActionFuture<>();
ThreadPool threadPool,
SynonymsManagementAPIService synonymsManagementAPIService
) {
// TODO: this is a temporary solution for loading synonyms under feature flag, to be redesigned for GA
final PlainActionFuture<PagedResult<SynonymRule>> synonymsLoadingFuture = new PlainActionFuture<>() {
@Override
protected boolean blockingAllowed() {
// allow blocking while loading synonyms under feature flag
return true;
}
};
threadPool.executor(ThreadPool.Names.SYSTEM_READ).execute(() -> {
synonymsManagementAPIService.getSynonymSetRules(synonymsSet, 0, 10_000, synonymsLoadingFuture); synonymsManagementAPIService.getSynonymSetRules(synonymsSet, 0, 10_000, synonymsLoadingFuture);
});
PagedResult<SynonymRule> results = synonymsLoadingFuture.actionGet(); PagedResult<SynonymRule> results = synonymsLoadingFuture.actionGet();
SynonymRule[] synonymRules = results.pageResults(); SynonymRule[] synonymRules = results.pageResults();

View file

@ -17,6 +17,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.IOUtils;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.analysis.AnalysisModule;
@ -41,7 +42,7 @@ import static java.util.Collections.unmodifiableMap;
/** /**
* An internal registry for tokenizer, token filter, char filter and analyzer. * An internal registry for tokenizer, token filter, char filter and analyzer.
* This class exists per node and allows to create per-index {@link IndexAnalyzers} via {@link #build(IndexSettings)} * This class exists per node and allows to create per-index {@link IndexAnalyzers} via {@link #build}
*/ */
public final class AnalysisRegistry implements Closeable { public final class AnalysisRegistry implements Closeable {
public static final String INDEX_ANALYSIS_CHAR_FILTER = "index.analysis.char_filter"; public static final String INDEX_ANALYSIS_CHAR_FILTER = "index.analysis.char_filter";
@ -201,14 +202,23 @@ public final class AnalysisRegistry implements Closeable {
/** /**
* Creates an index-level {@link IndexAnalyzers} from this registry using the given index settings * Creates an index-level {@link IndexAnalyzers} from this registry using the given index settings
* and {@link IndexCreationContext}.
*/ */
public IndexAnalyzers build(IndexSettings indexSettings) throws IOException { public IndexAnalyzers build(IndexCreationContext context, IndexSettings indexSettings) throws IOException {
final Map<String, CharFilterFactory> charFilterFactories = buildCharFilterFactories(indexSettings); final Map<String, CharFilterFactory> charFilterFactories = buildCharFilterFactories(indexSettings);
final Map<String, TokenizerFactory> tokenizerFactories = buildTokenizerFactories(indexSettings); final Map<String, TokenizerFactory> tokenizerFactories = buildTokenizerFactories(indexSettings);
final Map<String, TokenFilterFactory> tokenFilterFactories = buildTokenFilterFactories(indexSettings); final Map<String, TokenFilterFactory> tokenFilterFactories = buildTokenFilterFactories(indexSettings);
final Map<String, AnalyzerProvider<?>> analyzerFactories = buildAnalyzerFactories(indexSettings); final Map<String, AnalyzerProvider<?>> analyzerFactories = buildAnalyzerFactories(indexSettings);
final Map<String, AnalyzerProvider<?>> normalizerFactories = buildNormalizerFactories(indexSettings); final Map<String, AnalyzerProvider<?>> normalizerFactories = buildNormalizerFactories(indexSettings);
return build(indexSettings, analyzerFactories, normalizerFactories, tokenizerFactories, charFilterFactories, tokenFilterFactories); return build(
context,
indexSettings,
analyzerFactories,
normalizerFactories,
tokenizerFactories,
charFilterFactories,
tokenFilterFactories
);
} }
/** /**
@ -217,6 +227,7 @@ public final class AnalysisRegistry implements Closeable {
* Callers are responsible for closing the returned Analyzer * Callers are responsible for closing the returned Analyzer
*/ */
public NamedAnalyzer buildCustomAnalyzer( public NamedAnalyzer buildCustomAnalyzer(
IndexCreationContext context,
IndexSettings indexSettings, IndexSettings indexSettings,
boolean normalizer, boolean normalizer,
NameOrDefinition tokenizer, NameOrDefinition tokenizer,
@ -259,7 +270,7 @@ public final class AnalysisRegistry implements Closeable {
if (normalizer && tff instanceof NormalizingTokenFilterFactory == false) { if (normalizer && tff instanceof NormalizingTokenFilterFactory == false) {
throw new IllegalArgumentException("Custom normalizer may not use filter [" + tff.name() + "]"); throw new IllegalArgumentException("Custom normalizer may not use filter [" + tff.name() + "]");
} }
tff = tff.getChainAwareTokenFilterFactory(tokenizerFactory, charFilterFactories, tokenFilterFactories, name -> { tff = tff.getChainAwareTokenFilterFactory(context, tokenizerFactory, charFilterFactories, tokenFilterFactories, name -> {
try { try {
return getComponentFactory( return getComponentFactory(
indexSettings, indexSettings,
@ -281,7 +292,7 @@ public final class AnalysisRegistry implements Closeable {
charFilterFactories.toArray(new CharFilterFactory[] {}), charFilterFactories.toArray(new CharFilterFactory[] {}),
tokenFilterFactories.toArray(new TokenFilterFactory[] {}) tokenFilterFactories.toArray(new TokenFilterFactory[] {})
); );
return produceAnalyzer("__custom__", new AnalyzerProvider<>() { return produceAnalyzer(context, "__custom__", new AnalyzerProvider<>() {
@Override @Override
public String name() { public String name() {
return "__custom__"; return "__custom__";
@ -589,6 +600,7 @@ public final class AnalysisRegistry implements Closeable {
} }
public static IndexAnalyzers build( public static IndexAnalyzers build(
IndexCreationContext context,
IndexSettings indexSettings, IndexSettings indexSettings,
Map<String, AnalyzerProvider<?>> analyzerProviders, Map<String, AnalyzerProvider<?>> analyzerProviders,
Map<String, AnalyzerProvider<?>> normalizerProviders, Map<String, AnalyzerProvider<?>> normalizerProviders,
@ -603,6 +615,7 @@ public final class AnalysisRegistry implements Closeable {
analyzers.merge( analyzers.merge(
entry.getKey(), entry.getKey(),
produceAnalyzer( produceAnalyzer(
context,
entry.getKey(), entry.getKey(),
entry.getValue(), entry.getValue(),
tokenFilterFactoryFactories, tokenFilterFactoryFactories,
@ -641,6 +654,7 @@ public final class AnalysisRegistry implements Closeable {
analyzers.put( analyzers.put(
DEFAULT_ANALYZER_NAME, DEFAULT_ANALYZER_NAME,
produceAnalyzer( produceAnalyzer(
context,
DEFAULT_ANALYZER_NAME, DEFAULT_ANALYZER_NAME,
new StandardAnalyzerProvider(indexSettings, null, DEFAULT_ANALYZER_NAME, Settings.EMPTY), new StandardAnalyzerProvider(indexSettings, null, DEFAULT_ANALYZER_NAME, Settings.EMPTY),
tokenFilterFactoryFactories, tokenFilterFactoryFactories,
@ -677,6 +691,7 @@ public final class AnalysisRegistry implements Closeable {
} }
private static NamedAnalyzer produceAnalyzer( private static NamedAnalyzer produceAnalyzer(
IndexCreationContext context,
String name, String name,
AnalyzerProvider<?> analyzerFactory, AnalyzerProvider<?> analyzerFactory,
Map<String, TokenFilterFactory> tokenFilters, Map<String, TokenFilterFactory> tokenFilters,
@ -691,7 +706,7 @@ public final class AnalysisRegistry implements Closeable {
*/ */
int overridePositionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP; int overridePositionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
if (analyzerFactory instanceof CustomAnalyzerProvider) { if (analyzerFactory instanceof CustomAnalyzerProvider) {
((CustomAnalyzerProvider) analyzerFactory).build(tokenizers, charFilters, tokenFilters); ((CustomAnalyzerProvider) analyzerFactory).build(context, tokenizers, charFilters, tokenFilters);
/* /*
* Custom analyzers already default to the correct, version * Custom analyzers already default to the correct, version
* dependent positionIncrementGap and the user is be able to * dependent positionIncrementGap and the user is be able to

View file

@ -9,6 +9,7 @@
package org.elasticsearch.index.analysis; package org.elasticsearch.index.analysis;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@ -38,6 +39,7 @@ public final class AnalyzerComponents {
} }
static AnalyzerComponents createComponents( static AnalyzerComponents createComponents(
IndexCreationContext context,
String name, String name,
Settings analyzerSettings, Settings analyzerSettings,
final Map<String, TokenizerFactory> tokenizers, final Map<String, TokenizerFactory> tokenizers,
@ -77,7 +79,13 @@ public final class AnalyzerComponents {
"Custom Analyzer [" + name + "] failed to find filter under name " + "[" + tokenFilterName + "]" "Custom Analyzer [" + name + "] failed to find filter under name " + "[" + tokenFilterName + "]"
); );
} }
tokenFilter = tokenFilter.getChainAwareTokenFilterFactory(tokenizer, charFiltersList, tokenFilterList, tokenFilters::get); tokenFilter = tokenFilter.getChainAwareTokenFilterFactory(
context,
tokenizer,
charFiltersList,
tokenFilterList,
tokenFilters::get
);
tokenFilterList.add(tokenFilter); tokenFilterList.add(tokenFilter);
} }

View file

@ -10,6 +10,7 @@ package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper;
@ -33,11 +34,12 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Analyz
} }
void build( void build(
final IndexCreationContext context,
final Map<String, TokenizerFactory> tokenizers, final Map<String, TokenizerFactory> tokenizers,
final Map<String, CharFilterFactory> charFilters, final Map<String, CharFilterFactory> charFilters,
final Map<String, TokenFilterFactory> tokenFilters final Map<String, TokenFilterFactory> tokenFilters
) { ) {
customAnalyzer = create(name(), analyzerSettings, tokenizers, charFilters, tokenFilters); customAnalyzer = create(context, name(), analyzerSettings, tokenizers, charFilters, tokenFilters);
} }
/** /**
@ -45,6 +47,7 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Analyz
* and search time use, or a {@link ReloadableCustomAnalyzer} if the components are intended for search time use only. * and search time use, or a {@link ReloadableCustomAnalyzer} if the components are intended for search time use only.
*/ */
private static Analyzer create( private static Analyzer create(
IndexCreationContext context,
String name, String name,
Settings analyzerSettings, Settings analyzerSettings,
Map<String, TokenizerFactory> tokenizers, Map<String, TokenizerFactory> tokenizers,
@ -54,7 +57,7 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Analyz
int positionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP; int positionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
positionIncrementGap = analyzerSettings.getAsInt("position_increment_gap", positionIncrementGap); positionIncrementGap = analyzerSettings.getAsInt("position_increment_gap", positionIncrementGap);
int offsetGap = analyzerSettings.getAsInt("offset_gap", -1); int offsetGap = analyzerSettings.getAsInt("offset_gap", -1);
AnalyzerComponents components = createComponents(name, analyzerSettings, tokenizers, charFilters, tokenFilters); AnalyzerComponents components = createComponents(context, name, analyzerSettings, tokenizers, charFilters, tokenFilters);
if (components.analysisMode().equals(AnalysisMode.SEARCH_TIME)) { if (components.analysisMode().equals(AnalysisMode.SEARCH_TIME)) {
return new ReloadableCustomAnalyzer(components, positionIncrementGap, offsetGap); return new ReloadableCustomAnalyzer(components, positionIncrementGap, offsetGap);
} else { } else {

View file

@ -14,6 +14,7 @@ import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.util.CloseableThreadLocal; import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import java.io.Reader; import java.io.Reader;
import java.util.HashSet; import java.util.HashSet;
@ -136,7 +137,14 @@ public final class ReloadableCustomAnalyzer extends Analyzer implements Analyzer
final Map<String, CharFilterFactory> charFilters, final Map<String, CharFilterFactory> charFilters,
final Map<String, TokenFilterFactory> tokenFilters final Map<String, TokenFilterFactory> tokenFilters
) { ) {
AnalyzerComponents components = AnalyzerComponents.createComponents(name, settings, tokenizers, charFilters, tokenFilters); AnalyzerComponents components = AnalyzerComponents.createComponents(
IndexCreationContext.RELOAD_ANALYZERS,
name,
settings,
tokenizers,
charFilters,
tokenFilters
);
this.components = components; this.components = components;
} }

View file

@ -9,7 +9,9 @@
package org.elasticsearch.index.analysis; package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.synonym.SynonymGraphFilterFactory;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.search.fetch.subphase.highlight.FastVectorHighlighter; import org.elasticsearch.search.fetch.subphase.highlight.FastVectorHighlighter;
import java.util.List; import java.util.List;
@ -41,12 +43,20 @@ public interface TokenFilterFactory {
/** /**
* Rewrite the TokenFilterFactory to take into account the preceding analysis chain, or refer * Rewrite the TokenFilterFactory to take into account the preceding analysis chain, or refer
* to other TokenFilterFactories * to other TokenFilterFactories
* If the token filter is part of the definition of a {@link ReloadableCustomAnalyzer},
* this function is called twice, once at index creation with {@link IndexCreationContext#CREATE_INDEX}
* and then later with {@link IndexCreationContext#RELOAD_ANALYZERS} on shard recovery.
* The {@link IndexCreationContext#RELOAD_ANALYZERS} context should be used to load expensive resources
* on a generic thread pool. See {@link SynonymGraphFilterFactory} for an example of how this context
* is used.
* @param context the IndexCreationContext for the underlying index
* @param tokenizer the TokenizerFactory for the preceding chain * @param tokenizer the TokenizerFactory for the preceding chain
* @param charFilters any CharFilterFactories for the preceding chain * @param charFilters any CharFilterFactories for the preceding chain
* @param previousTokenFilters a list of TokenFilterFactories in the preceding chain * @param previousTokenFilters a list of TokenFilterFactories in the preceding chain
* @param allFilters access to previously defined TokenFilterFactories * @param allFilters access to previously defined TokenFilterFactories
*/ */
default TokenFilterFactory getChainAwareTokenFilterFactory( default TokenFilterFactory getChainAwareTokenFilterFactory(
IndexCreationContext context,
TokenizerFactory tokenizer, TokenizerFactory tokenizer,
List<CharFilterFactory> charFilters, List<CharFilterFactory> charFilters,
List<TokenFilterFactory> previousTokenFilters, List<TokenFilterFactory> previousTokenFilters,

View file

@ -17,6 +17,7 @@ import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersion;
@ -525,7 +526,15 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
return mappingLookup().isMultiField(field); return mappingLookup().isMultiField(field);
} }
public synchronized List<String> reloadSearchAnalyzers(AnalysisRegistry registry, String resource) throws IOException { /**
* Reload any search analyzers that have reloadable components if resource is {@code null},
* otherwise only the provided resource is reloaded.
* @param registry the analysis registry
* @param resource the name of the reloadable resource or {@code null} if all resources should be reloaded.
* @return The names of reloaded resources.
* @throws IOException
*/
public synchronized List<String> reloadSearchAnalyzers(AnalysisRegistry registry, @Nullable String resource) throws IOException {
logger.info("reloading search analyzers"); logger.info("reloading search analyzers");
// TODO this should bust the cache somehow. Tracked in https://github.com/elastic/elasticsearch/issues/66722 // TODO this should bust the cache somehow. Tracked in https://github.com/elastic/elasticsearch/issues/66722
return indexAnalyzers.reload(registry, indexSettings, resource); return indexAnalyzers.reload(registry, indexSettings, resource);

View file

@ -18,6 +18,7 @@ import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.mapping.put.AutoPutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.AutoPutMappingAction;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
@ -626,8 +627,35 @@ public class IndicesService extends AbstractLifecycleComponent
} }
} }
}; };
final IndexEventListener beforeIndexShardRecovery = new IndexEventListener() {
volatile boolean reloaded;
@Override
public void beforeIndexShardRecovery(IndexShard indexShard, IndexSettings indexSettings, ActionListener<Void> listener) {
try {
if (indexShard.mapperService() != null) {
// we need to reload once, not on every shard recovery in case multiple shards are on the same node
if (reloaded == false) {
synchronized (indexShard.mapperService()) {
if (reloaded == false) {
// we finish loading analyzers from resources here
// during shard recovery in the generic thread pool,
// as this may require longer running operations and blocking calls
indexShard.mapperService().reloadSearchAnalyzers(getAnalysis(), null);
}
reloaded = true;
}
}
}
listener.onResponse(null);
} catch (Exception e) {
listener.onFailure(e);
}
}
};
finalListeners.add(onStoreClose); finalListeners.add(onStoreClose);
finalListeners.add(oldShardsStats); finalListeners.add(oldShardsStats);
finalListeners.add(beforeIndexShardRecovery);
IndexService indexService; IndexService indexService;
try (var ignored = threadPool.getThreadContext().newStoredContext()) { try (var ignored = threadPool.getThreadContext().newStoredContext()) {
indexService = createIndexService( indexService = createIndexService(

View file

@ -20,6 +20,7 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasable;
import org.elasticsearch.core.Releasables; import org.elasticsearch.core.Releasables;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.NameOrDefinition; import org.elasticsearch.index.analysis.NameOrDefinition;
@ -443,7 +444,14 @@ public abstract class AggregationContext implements Releasable {
List<NameOrDefinition> charFilters, List<NameOrDefinition> charFilters,
List<NameOrDefinition> tokenFilters List<NameOrDefinition> tokenFilters
) throws IOException { ) throws IOException {
return analysisRegistry.buildCustomAnalyzer(indexSettings, normalizer, tokenizer, charFilters, tokenFilters); return analysisRegistry.buildCustomAnalyzer(
IndexService.IndexCreationContext.RELOAD_ANALYZERS,
indexSettings,
normalizer,
tokenizer,
charFilters,
tokenFilters
);
} }
@Override @Override

View file

@ -140,7 +140,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
} }
}; };
registry = new AnalysisModule(environment, singletonList(plugin), new StablePluginsRegistry()).getAnalysisRegistry(); registry = new AnalysisModule(environment, singletonList(plugin), new StablePluginsRegistry()).getAnalysisRegistry();
indexAnalyzers = registry.build(this.indexSettings); indexAnalyzers = registry.build(IndexService.IndexCreationContext.RELOAD_ANALYZERS, this.indexSettings);
maxTokenCount = IndexSettings.MAX_TOKEN_COUNT_SETTING.getDefault(settings); maxTokenCount = IndexSettings.MAX_TOKEN_COUNT_SETTING.getDefault(settings);
idxMaxTokenCount = this.indexSettings.getMaxTokenCount(); idxMaxTokenCount = this.indexSettings.getMaxTokenCount();
} }

View file

@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.mapper.MapperException;
import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.analysis.AnalysisModule;
@ -107,7 +108,7 @@ public class AnalysisRegistryTests extends ESTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build(); .build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
IndexAnalyzers indexAnalyzers = emptyRegistry.build(idxSettings); IndexAnalyzers indexAnalyzers = emptyRegistry.build(IndexCreationContext.CREATE_INDEX, idxSettings);
assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
@ -118,6 +119,7 @@ public class AnalysisRegistryTests extends ESTestCase {
Version version = VersionUtils.randomVersion(random()); Version version = VersionUtils.randomVersion(random());
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
IndexAnalyzers indexAnalyzers = AnalysisRegistry.build( IndexAnalyzers indexAnalyzers = AnalysisRegistry.build(
IndexCreationContext.CREATE_INDEX,
IndexSettingsModule.newIndexSettings("index", settings), IndexSettingsModule.newIndexSettings("index", settings),
singletonMap("default", analyzerProvider("default")), singletonMap("default", analyzerProvider("default")),
emptyMap(), emptyMap(),
@ -155,6 +157,7 @@ public class AnalysisRegistryTests extends ESTestCase {
MapperException ex = expectThrows( MapperException ex = expectThrows(
MapperException.class, MapperException.class,
() -> AnalysisRegistry.build( () -> AnalysisRegistry.build(
IndexCreationContext.CREATE_INDEX,
IndexSettingsModule.newIndexSettings("index", settings), IndexSettingsModule.newIndexSettings("index", settings),
singletonMap("default", new PreBuiltAnalyzerProvider("default", AnalyzerScope.INDEX, analyzer)), singletonMap("default", new PreBuiltAnalyzerProvider("default", AnalyzerScope.INDEX, analyzer)),
emptyMap(), emptyMap(),
@ -169,7 +172,10 @@ public class AnalysisRegistryTests extends ESTestCase {
public void testNameClashNormalizer() throws IOException { public void testNameClashNormalizer() throws IOException {
// Test out-of-the-box normalizer works OK. // Test out-of-the-box normalizer works OK.
IndexAnalyzers indexAnalyzers = nonEmptyRegistry.build(IndexSettingsModule.newIndexSettings("index", Settings.EMPTY)); IndexAnalyzers indexAnalyzers = nonEmptyRegistry.build(
IndexCreationContext.CREATE_INDEX,
IndexSettingsModule.newIndexSettings("index", Settings.EMPTY)
);
assertNotNull(indexAnalyzers.getNormalizer("lowercase")); assertNotNull(indexAnalyzers.getNormalizer("lowercase"));
assertThat(indexAnalyzers.getNormalizer("lowercase").normalize("field", "AbC").utf8ToString(), equalTo("abc")); assertThat(indexAnalyzers.getNormalizer("lowercase").normalize("field", "AbC").utf8ToString(), equalTo("abc"));
@ -181,7 +187,7 @@ public class AnalysisRegistryTests extends ESTestCase {
.putList("index.analysis.normalizer.lowercase.filter", "reverse") .putList("index.analysis.normalizer.lowercase.filter", "reverse")
.build(); .build();
indexAnalyzers = nonEmptyRegistry.build(IndexSettingsModule.newIndexSettings("index", settings)); indexAnalyzers = nonEmptyRegistry.build(IndexCreationContext.CREATE_INDEX, IndexSettingsModule.newIndexSettings("index", settings));
assertNotNull(indexAnalyzers.getNormalizer("lowercase")); assertNotNull(indexAnalyzers.getNormalizer("lowercase"));
assertThat(indexAnalyzers.getNormalizer("lowercase").normalize("field", "AbC").utf8ToString(), equalTo("CbA")); assertThat(indexAnalyzers.getNormalizer("lowercase").normalize("field", "AbC").utf8ToString(), equalTo("CbA"));
} }
@ -193,6 +199,7 @@ public class AnalysisRegistryTests extends ESTestCase {
IllegalArgumentException e = expectThrows( IllegalArgumentException e = expectThrows(
IllegalArgumentException.class, IllegalArgumentException.class,
() -> AnalysisRegistry.build( () -> AnalysisRegistry.build(
IndexCreationContext.CREATE_INDEX,
IndexSettingsModule.newIndexSettings("index", settings), IndexSettingsModule.newIndexSettings("index", settings),
singletonMap("default_index", defaultIndex), singletonMap("default_index", defaultIndex),
emptyMap(), emptyMap(),
@ -208,6 +215,7 @@ public class AnalysisRegistryTests extends ESTestCase {
Version version = VersionUtils.randomVersion(random()); Version version = VersionUtils.randomVersion(random());
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
IndexAnalyzers indexAnalyzers = AnalysisRegistry.build( IndexAnalyzers indexAnalyzers = AnalysisRegistry.build(
IndexCreationContext.CREATE_INDEX,
IndexSettingsModule.newIndexSettings("index", settings), IndexSettingsModule.newIndexSettings("index", settings),
singletonMap("default_search", analyzerProvider("default_search")), singletonMap("default_search", analyzerProvider("default_search")),
emptyMap(), emptyMap(),
@ -263,7 +271,7 @@ public class AnalysisRegistryTests extends ESTestCase {
TestEnvironment.newEnvironment(settings), TestEnvironment.newEnvironment(settings),
singletonList(plugin), singletonList(plugin),
new StablePluginsRegistry() new StablePluginsRegistry()
).getAnalysisRegistry().build(idxSettings); ).getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings);
// This shouldn't contain English stopwords // This shouldn't contain English stopwords
try (NamedAnalyzer custom_analyser = indexAnalyzers.get("custom_analyzer_with_camel_case")) { try (NamedAnalyzer custom_analyser = indexAnalyzers.get("custom_analyzer_with_camel_case")) {
@ -298,8 +306,8 @@ public class AnalysisRegistryTests extends ESTestCase {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(); Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
IndexAnalyzers indexAnalyzers = emptyAnalysisRegistry(settings).build(idxSettings); IndexAnalyzers indexAnalyzers = emptyAnalysisRegistry(settings).build(IndexCreationContext.CREATE_INDEX, idxSettings);
IndexAnalyzers otherIndexAnalyzers = emptyAnalysisRegistry(settings).build(idxSettings); IndexAnalyzers otherIndexAnalyzers = emptyAnalysisRegistry(settings).build(IndexCreationContext.CREATE_INDEX, idxSettings);
final int numIters = randomIntBetween(5, 20); final int numIters = randomIntBetween(5, 20);
for (int i = 0; i < numIters; i++) { for (int i = 0; i < numIters; i++) {
PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(random(), PreBuiltAnalyzers.values()); PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(random(), PreBuiltAnalyzers.values());
@ -317,12 +325,18 @@ public class AnalysisRegistryTests extends ESTestCase {
.build(); .build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> emptyAnalysisRegistry(settings).build(idxSettings)); IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> emptyAnalysisRegistry(settings).build(IndexCreationContext.CREATE_INDEX, idxSettings)
);
assertThat(e.getMessage(), equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer")); assertThat(e.getMessage(), equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer"));
} }
public void testCloseIndexAnalyzersMultipleTimes() throws IOException { public void testCloseIndexAnalyzersMultipleTimes() throws IOException {
IndexAnalyzers indexAnalyzers = emptyRegistry.build(indexSettingsOfCurrentVersion(Settings.builder())); IndexAnalyzers indexAnalyzers = emptyRegistry.build(
IndexCreationContext.CREATE_INDEX,
indexSettingsOfCurrentVersion(Settings.builder())
);
indexAnalyzers.close(); indexAnalyzers.close();
indexAnalyzers.close(); indexAnalyzers.close();
} }

View file

@ -16,6 +16,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.junit.BeforeClass; import org.junit.BeforeClass;
@ -72,6 +73,7 @@ public class ReloadableCustomAnalyzerTests extends ESTestCase {
Settings analyzerSettings = Settings.builder().put("tokenizer", "standard").putList("filter", "my_filter").build(); Settings analyzerSettings = Settings.builder().put("tokenizer", "standard").putList("filter", "my_filter").build();
AnalyzerComponents components = createComponents( AnalyzerComponents components = createComponents(
IndexCreationContext.CREATE_INDEX,
"my_analyzer", "my_analyzer",
analyzerSettings, analyzerSettings,
testAnalysis.tokenizer, testAnalysis.tokenizer,
@ -92,6 +94,7 @@ public class ReloadableCustomAnalyzerTests extends ESTestCase {
// check that when using regular non-search time filters only, we get an exception // check that when using regular non-search time filters only, we get an exception
final Settings indexAnalyzerSettings = Settings.builder().put("tokenizer", "standard").putList("filter", "lowercase").build(); final Settings indexAnalyzerSettings = Settings.builder().put("tokenizer", "standard").putList("filter", "lowercase").build();
AnalyzerComponents indexAnalyzerComponents = createComponents( AnalyzerComponents indexAnalyzerComponents = createComponents(
IndexCreationContext.CREATE_INDEX,
"my_analyzer", "my_analyzer",
indexAnalyzerSettings, indexAnalyzerSettings,
testAnalysis.tokenizer, testAnalysis.tokenizer,
@ -115,6 +118,7 @@ public class ReloadableCustomAnalyzerTests extends ESTestCase {
Settings analyzerSettings = Settings.builder().put("tokenizer", "standard").putList("filter", "my_filter").build(); Settings analyzerSettings = Settings.builder().put("tokenizer", "standard").putList("filter", "my_filter").build();
AnalyzerComponents components = createComponents( AnalyzerComponents components = createComponents(
IndexCreationContext.RELOAD_ANALYZERS,
"my_analyzer", "my_analyzer",
analyzerSettings, analyzerSettings,
testAnalysis.tokenizer, testAnalysis.tokenizer,

View file

@ -19,6 +19,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.analysis.Analysis; import org.elasticsearch.index.analysis.Analysis;
@ -75,7 +76,7 @@ public class AnalysisModuleTests extends ESTestCase {
public IndexAnalyzers getIndexAnalyzers(AnalysisRegistry registry, Settings settings) throws IOException { public IndexAnalyzers getIndexAnalyzers(AnalysisRegistry registry, Settings settings) throws IOException {
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
return registry.build(idxSettings); return registry.build(IndexCreationContext.CREATE_INDEX, idxSettings);
} }
public AnalysisRegistry getNewRegistry(Settings settings) { public AnalysisRegistry getNewRegistry(Settings settings) {

View file

@ -12,6 +12,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.IndexAnalyzers;
@ -122,7 +123,7 @@ public class IncorrectSetupStablePluginsTests extends ESTestCase {
AnalysisRegistry registry = setupRegistry(mapOfCharFilters); AnalysisRegistry registry = setupRegistry(mapOfCharFilters);
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
return registry.build(idxSettings); return registry.build(IndexCreationContext.CREATE_INDEX, idxSettings);
} }
private AnalysisRegistry setupRegistry(Map<String, PluginInfo> mapOfCharFilters) throws IOException { private AnalysisRegistry setupRegistry(Map<String, PluginInfo> mapOfCharFilters) throws IOException {

View file

@ -16,6 +16,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.IndexAnalyzers;
@ -55,7 +56,7 @@ public class StableAnalysisPluginsNoSettingsTests extends ESTestCase {
AnalysisRegistry registry = setupRegistry(); AnalysisRegistry registry = setupRegistry();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
return registry.build(idxSettings); return registry.build(IndexCreationContext.CREATE_INDEX, idxSettings);
} }
public void testStablePlugins() throws IOException { public void testStablePlugins() throws IOException {

View file

@ -15,6 +15,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.IndexAnalyzers;
@ -132,7 +133,7 @@ public class StableAnalysisPluginsWithSettingsTests extends ESTestCase {
AnalysisRegistry registry = setupRegistry(); AnalysisRegistry registry = setupRegistry();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
return registry.build(idxSettings); return registry.build(IndexCreationContext.CREATE_INDEX, idxSettings);
} }
private AnalysisRegistry setupRegistry() throws IOException { private AnalysisRegistry setupRegistry() throws IOException {

View file

@ -12,6 +12,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.AnalysisPlugin;
@ -61,7 +62,7 @@ public class AnalysisTestsHelper {
new StablePluginsRegistry() new StablePluginsRegistry()
).getAnalysisRegistry(); ).getAnalysisRegistry();
return new ESTestCase.TestAnalysis( return new ESTestCase.TestAnalysis(
analysisRegistry.build(indexSettings), analysisRegistry.build(IndexCreationContext.CREATE_INDEX, indexSettings),
analysisRegistry.buildTokenFilterFactories(indexSettings), analysisRegistry.buildTokenFilterFactories(indexSettings),
analysisRegistry.buildTokenizerFactories(indexSettings), analysisRegistry.buildTokenizerFactories(indexSettings),
analysisRegistry.buildCharFilterFactories(indexSettings) analysisRegistry.buildCharFilterFactories(indexSettings)

View file

@ -37,6 +37,7 @@ import org.elasticsearch.core.IOUtils;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
@ -455,7 +456,7 @@ public abstract class AbstractBuilderTestCase extends ESTestCase {
emptyList(), emptyList(),
new StablePluginsRegistry() new StablePluginsRegistry()
); );
IndexAnalyzers indexAnalyzers = analysisModule.getAnalysisRegistry().build(idxSettings); IndexAnalyzers indexAnalyzers = analysisModule.getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings);
scriptService = new MockScriptService(Settings.EMPTY, scriptModule.engines, scriptModule.contexts); scriptService = new MockScriptService(Settings.EMPTY, scriptModule.engines, scriptModule.contexts);
similarityService = new SimilarityService(idxSettings, null, Collections.emptyMap()); similarityService = new SimilarityService(idxSettings, null, Collections.emptyMap());
MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); MapperRegistry mapperRegistry = indicesModule.getMapperRegistry();

View file

@ -83,6 +83,7 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisRegistry;
@ -1749,7 +1750,7 @@ public abstract class ESTestCase extends LuceneTestCase {
AnalysisModule analysisModule = new AnalysisModule(env, Arrays.asList(analysisPlugins), new StablePluginsRegistry()); AnalysisModule analysisModule = new AnalysisModule(env, Arrays.asList(analysisPlugins), new StablePluginsRegistry());
AnalysisRegistry analysisRegistry = analysisModule.getAnalysisRegistry(); AnalysisRegistry analysisRegistry = analysisModule.getAnalysisRegistry();
return new TestAnalysis( return new TestAnalysis(
analysisRegistry.build(indexSettings), analysisRegistry.build(IndexCreationContext.CREATE_INDEX, indexSettings),
analysisRegistry.buildTokenFilterFactories(indexSettings), analysisRegistry.buildTokenFilterFactories(indexSettings),
analysisRegistry.buildTokenizerFactories(indexSettings), analysisRegistry.buildTokenizerFactories(indexSettings),
analysisRegistry.buildCharFilterFactories(indexSettings) analysisRegistry.buildCharFilterFactories(indexSettings)

View file

@ -12,6 +12,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasable;
import org.elasticsearch.core.Tuple; import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.IndexService.IndexCreationContext;
import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig;
@ -112,7 +113,14 @@ public class CategorizationAnalyzer implements Releasable {
return new Tuple<>(globalAnalyzer, Boolean.FALSE); return new Tuple<>(globalAnalyzer, Boolean.FALSE);
} else { } else {
return new Tuple<>( return new Tuple<>(
analysisRegistry.buildCustomAnalyzer(null, false, config.getTokenizer(), config.getCharFilters(), config.getTokenFilters()), analysisRegistry.buildCustomAnalyzer(
IndexCreationContext.RELOAD_ANALYZERS,
null,
false,
config.getTokenizer(),
config.getCharFilters(),
config.getTokenFilters()
),
Boolean.TRUE Boolean.TRUE
); );
} }