Synthetic _source: support field in many cases (#89950)

This adds support for the `field` scripting API in many but not all
cases. Before this change, numbers, dates, and IPs supported the `field`
API under synthetic `_source` because they always have doc values. This
change adds support for `match_only_text`, `store`d `keyword` fields, and
`store`d `text` fields. Two remaining field configurations work with
synthetic `_source` but do not work with `field`:
* A `text` field with a sub-`keyword` field that has `doc_values`
* A `text` field with a sub-`keyword` field that is `store`d

![image](https://user-images.githubusercontent.com/215970/189217841-4378ed42-e454-42c1-aaf0-6c2c041b29be.png)
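To make the supported cases concrete, here is a minimal sketch in the style of the unit tests added in this change (the test name is hypothetical; `createMapperService`, `syntheticSourceFieldMapping`, and `assertScriptDocValues` are the `MapperTestCase` helpers the real tests below use): a stored `keyword` field without doc values becomes readable through the script `field` API under synthetic `_source`.

```java
// Sketch only; mirrors KeywordFieldMapperTests#testDocValuesLoadedFromStoredSynthetic below.
public void testStoredKeywordUnderSyntheticSource() throws IOException {
    MapperService mapper = createMapperService(
        syntheticSourceFieldMapping(b -> b.field("type", "keyword").field("doc_values", false).field("store", true))
    );
    // A script can now call field('field').get('missing') against this mapping.
    assertScriptDocValues(mapper, "foo", equalTo(List.of("foo")));
}
```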
Nik Everett authored 2022-11-10 10:44:06 -05:00, committed by GitHub
commit 74d0d19c0f (parent f83a530daa)
36 changed files with 624 additions and 206 deletions


@@ -0,0 +1,5 @@
pr: 89950
summary: "Synthetic _source: support `field` in many cases"
area: TSDB
type: enhancement
issues: []


@@ -2382,7 +2382,6 @@ keyword_stored_synthetic:
   - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
   - do:
-      catch: bad_request # TODO make this work in a follow up
       search:
         index: test_synthetic
         body:
@@ -2391,13 +2390,11 @@ keyword_stored_synthetic:
           field:
             script:
               source: "field('keyword_stored').get('missing')"
-  # - match: { hits.hits.0.fields.field.0: "no doc values" }
-  # - match: { hits.hits.1.fields.field.0: "missing" }
-  # - match: { hits.hits.2.fields.field.0: "no doc values 0" } # doc values are sorted
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "no doc values" }
+  - match: { hits.hits.1.fields.field.0: "missing" }
+  - match: { hits.hits.2.fields.field.0: "no doc values 0" } # doc values are sorted
   - do:
-      catch: bad_request # TODO make this work in a follow up
       search:
         index: test_synthetic
         body:
@@ -2407,13 +2404,11 @@ keyword_stored_synthetic:
             script:
               source: "/* avoid yaml stash */ $('keyword_stored', 'missing')"
               # same as `field('keyword').get('missing')`
-  # - match: { hits.hits.0.fields.field.0: "no doc values" }
-  # - match: { hits.hits.1.fields.field.0: "missing" }
-  # - match: { hits.hits.2.fields.field.0: "no doc values 0" }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "no doc values" }
+  - match: { hits.hits.1.fields.field.0: "missing" }
+  - match: { hits.hits.2.fields.field.0: "no doc values 0" }
   - do:
-      catch: bad_request # TODO make this work in a follow up
       search:
         index: test_synthetic
         body:
@@ -2422,13 +2417,11 @@ keyword_stored_synthetic:
           field:
             script:
               source: "field('keyword_stored').get(1, 'dne')"
-  # - match: { hits.hits.0.fields.field.0: "dne" }
-  # - match: { hits.hits.1.fields.field.0: "dne" }
-  # - match: { hits.hits.2.fields.field.0: "no doc values 1" } # doc values are sorted
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "dne" }
+  - match: { hits.hits.1.fields.field.0: "dne" }
+  - match: { hits.hits.2.fields.field.0: "no doc values 1" } # doc values are sorted
   - do:
-      catch: bad_request # TODO make this work in a follow up
       search:
         index: test_synthetic
         body:
@@ -2437,10 +2430,9 @@ keyword_stored_synthetic:
           field:
             script:
               source: "String.join(', ', field('keyword_stored'))"
-  # - match: { hits.hits.0.fields.field.0: "no doc values" }
-  # - match: { hits.hits.1.fields.field.0: "" }
-  # - match: { hits.hits.2.fields.field.0: "no doc values 0, no doc values 1, no doc values 2" }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "no doc values" }
+  - match: { hits.hits.1.fields.field.0: "" }
+  - match: { hits.hits.2.fields.field.0: "no doc values 0, no doc values 1, no doc values 2" }
 
 ---
 long:
@@ -5188,7 +5180,7 @@ text_sub_stored_keyword_synthetic:
         script_fields:
           field:
             script:
-              source: "String cat = ''; for (String s : field('text_sub_stored_keyword')) { cat += s; } cat + field('text_no_field_data').size();"
+              source: "String cat = ''; for (String s : field('text_sub_stored_keyword')) { cat += s; } cat + field('text_sub_stored_keyword').size();"
   # - match: { hits.hits.0.fields.field.0: "Lots of text.1" }
   # - match: { hits.hits.1.fields.field.0: "0" }
   # - match: { hits.hits.2.fields.field.0: "Lots of text.SOOOOO much texteven more text3" }
@@ -5221,7 +5213,6 @@ text_stored_synthetic:
   - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5230,13 +5221,11 @@ text_stored_synthetic:
           field:
             script:
               source: "field('text_stored').get('')"
-  # - match: { hits.hits.0.fields.field.0: "Lots of text." }
-  # - match: { hits.hits.1.fields.field.0: "" }
-  # - match: { hits.hits.2.fields.field.0: "Lots of text." }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "Lots of text." }
+  - match: { hits.hits.1.fields.field.0: "" }
+  - match: { hits.hits.2.fields.field.0: "Lots of text." }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5245,13 +5234,11 @@ text_stored_synthetic:
           field:
             script:
               source: "/* avoid yaml stash */ $('text_stored', '')"
-  # - match: { hits.hits.0.fields.field.0: "Lots of text." }
-  # - match: { hits.hits.1.fields.field.0: "" }
-  # - match: { hits.hits.2.fields.field.0: "Lots of text." }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "Lots of text." }
+  - match: { hits.hits.1.fields.field.0: "" }
+  - match: { hits.hits.2.fields.field.0: "Lots of text." }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5260,13 +5247,11 @@ text_stored_synthetic:
          field:
            script:
              source: "String defaultText = 'default text'; field('text_stored').get(defaultText)"
-  # - match: { hits.hits.0.fields.field.0: "Lots of text." }
-  # - match: { hits.hits.1.fields.field.0: "default text" }
-  # - match: { hits.hits.2.fields.field.0: "Lots of text." }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "Lots of text." }
+  - match: { hits.hits.1.fields.field.0: "default text" }
+  - match: { hits.hits.2.fields.field.0: "Lots of text." }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5275,13 +5260,11 @@ text_stored_synthetic:
          field:
            script:
              source: "String defaultText = 'default text'; $('text_stored', defaultText)"
-  # - match: { hits.hits.0.fields.field.0: "Lots of text." }
-  # - match: { hits.hits.1.fields.field.0: "default text" }
-  # - match: { hits.hits.2.fields.field.0: "Lots of text." }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "Lots of text." }
+  - match: { hits.hits.1.fields.field.0: "default text" }
+  - match: { hits.hits.2.fields.field.0: "Lots of text." }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5290,13 +5273,11 @@ text_stored_synthetic:
          field:
            script:
              source: "field('text_stored').get(1, '')"
-  # - match: { hits.hits.0.fields.field.0: "" }
-  # - match: { hits.hits.1.fields.field.0: "" }
-  # - match: { hits.hits.2.fields.field.0: "SOOOOO much text" }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "" }
+  - match: { hits.hits.1.fields.field.0: "" }
+  - match: { hits.hits.2.fields.field.0: "SOOOOO much text" }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5305,13 +5286,11 @@ text_stored_synthetic:
          field:
            script:
              source: "String defaultText = 'default text'; field('text_stored').get(1, defaultText)"
-  # - match: { hits.hits.0.fields.field.0: "default text" }
-  # - match: { hits.hits.1.fields.field.0: "default text" }
-  # - match: { hits.hits.2.fields.field.0: "SOOOOO much text" }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "default text" }
+  - match: { hits.hits.1.fields.field.0: "default text" }
+  - match: { hits.hits.2.fields.field.0: "SOOOOO much text" }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5320,13 +5299,11 @@ text_stored_synthetic:
          field:
            script:
              source: "field('text_stored').get(1, '')"
-  # - match: { hits.hits.0.fields.field.0: "" }
-  # - match: { hits.hits.1.fields.field.0: "" }
-  # - match: { hits.hits.2.fields.field.0: "SOOOOO much text" }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "" }
+  - match: { hits.hits.1.fields.field.0: "" }
+  - match: { hits.hits.2.fields.field.0: "SOOOOO much text" }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5334,11 +5311,10 @@ text_stored_synthetic:
         script_fields:
           field:
             script:
-              source: "String cat = ''; for (String s : field('text_stored')) { cat += s; } cat + field('text_no_field_data').size();"
-  # - match: { hits.hits.0.fields.field.0: "Lots of text.1" }
-  # - match: { hits.hits.1.fields.field.0: "0" }
-  # - match: { hits.hits.2.fields.field.0: "Lots of text.SOOOOO much texteven more text3" }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+              source: "String cat = ''; for (String s : field('text_stored')) { cat += s; } cat + field('text_stored').size();"
+  - match: { hits.hits.0.fields.field.0: "Lots of text.1" }
+  - match: { hits.hits.1.fields.field.0: "0" }
+  - match: { hits.hits.2.fields.field.0: "Lots of text.SOOOOO much texteven more text3" }
 
 ---
 match_only_text:
@@ -5497,7 +5473,6 @@ match_only_text_synthetic:
   - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5506,13 +5481,11 @@ match_only_text_synthetic:
          field:
            script:
              source: "field('match_only_text').get('')"
-  # - match: { hits.hits.0.fields.field.0: "Lots of text." }
-  # - match: { hits.hits.1.fields.field.0: "" }
-  # - match: { hits.hits.2.fields.field.0: "Lots of text." }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "Lots of text." }
+  - match: { hits.hits.1.fields.field.0: "" }
+  - match: { hits.hits.2.fields.field.0: "Lots of text." }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5521,13 +5494,11 @@ match_only_text_synthetic:
          field:
            script:
              source: "/* avoid yaml stash */ $('match_only_text', '')"
-  # - match: { hits.hits.0.fields.field.0: "Lots of text." }
-  # - match: { hits.hits.1.fields.field.0: "" }
-  # - match: { hits.hits.2.fields.field.0: "Lots of text." }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "Lots of text." }
+  - match: { hits.hits.1.fields.field.0: "" }
+  - match: { hits.hits.2.fields.field.0: "Lots of text." }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5536,13 +5507,11 @@ match_only_text_synthetic:
          field:
            script:
              source: "String defaultText = 'default text'; field('match_only_text').get(defaultText)"
-  # - match: { hits.hits.0.fields.field.0: "Lots of text." }
-  # - match: { hits.hits.1.fields.field.0: "default text" }
-  # - match: { hits.hits.2.fields.field.0: "Lots of text." }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "Lots of text." }
+  - match: { hits.hits.1.fields.field.0: "default text" }
+  - match: { hits.hits.2.fields.field.0: "Lots of text." }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5551,13 +5520,11 @@ match_only_text_synthetic:
          field:
            script:
              source: "String defaultText = 'default text'; $('match_only_text', defaultText)"
-  # - match: { hits.hits.0.fields.field.0: "Lots of text." }
-  # - match: { hits.hits.1.fields.field.0: "default text" }
-  # - match: { hits.hits.2.fields.field.0: "Lots of text." }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "Lots of text." }
+  - match: { hits.hits.1.fields.field.0: "default text" }
+  - match: { hits.hits.2.fields.field.0: "Lots of text." }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5566,13 +5533,11 @@ match_only_text_synthetic:
          field:
            script:
              source: "field('match_only_text').get(1, '')"
-  # - match: { hits.hits.0.fields.field.0: "" }
-  # - match: { hits.hits.1.fields.field.0: "" }
-  # - match: { hits.hits.2.fields.field.0: "SOOOOO much text" }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "" }
+  - match: { hits.hits.1.fields.field.0: "" }
+  - match: { hits.hits.2.fields.field.0: "SOOOOO much text" }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5581,13 +5546,11 @@ match_only_text_synthetic:
          field:
            script:
              source: "String defaultText = 'default text'; field('match_only_text').get(1, defaultText)"
-  # - match: { hits.hits.0.fields.field.0: "default text" }
-  # - match: { hits.hits.1.fields.field.0: "default text" }
-  # - match: { hits.hits.2.fields.field.0: "SOOOOO much text" }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "default text" }
+  - match: { hits.hits.1.fields.field.0: "default text" }
+  - match: { hits.hits.2.fields.field.0: "SOOOOO much text" }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5596,13 +5559,11 @@ match_only_text_synthetic:
          field:
            script:
              source: "field('match_only_text').get(1, '')"
-  # - match: { hits.hits.0.fields.field.0: "" }
-  # - match: { hits.hits.1.fields.field.0: "" }
-  # - match: { hits.hits.2.fields.field.0: "SOOOOO much text" }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "" }
+  - match: { hits.hits.1.fields.field.0: "" }
+  - match: { hits.hits.2.fields.field.0: "SOOOOO much text" }
   - do:
-      catch: bad_request # TODO fix in a followup
       search:
         index: test_synthetic
         body:
@@ -5611,10 +5572,9 @@ match_only_text_synthetic:
          field:
            script:
              source: "String cat = ''; for (String s : field('match_only_text')) { cat += s; } cat + field('match_only_text').size();"
-  # - match: { hits.hits.0.fields.field.0: "Lots of text.1" }
-  # - match: { hits.hits.1.fields.field.0: "0" }
-  # - match: { hits.hits.2.fields.field.0: "Lots of text.SOOOOO much texteven more text3" }
-  - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" }
+  - match: { hits.hits.0.fields.field.0: "Lots of text.1" }
+  - match: { hits.hits.1.fields.field.0: "0" }
+  - match: { hits.hits.2.fields.field.0: "Lots of text.SOOOOO much texteven more text3" }
 
 ---
 version and sequence number:


@@ -35,6 +35,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.fielddata.FieldDataContext;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.SourceValueFetcherSortedBinaryIndexFieldData;
+import org.elasticsearch.index.fielddata.StoredFieldSortedBinaryIndexFieldData;
 import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader;
 import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
 import org.elasticsearch.index.mapper.DocumentParserContext;
@@ -121,7 +122,13 @@ public class MatchOnlyTextFieldMapper extends FieldMapper {
             NamedAnalyzer searchQuoteAnalyzer = analyzers.getSearchQuoteAnalyzer();
             NamedAnalyzer indexAnalyzer = analyzers.getIndexAnalyzer();
             TextSearchInfo tsi = new TextSearchInfo(Defaults.FIELD_TYPE, null, searchAnalyzer, searchQuoteAnalyzer);
-            MatchOnlyTextFieldType ft = new MatchOnlyTextFieldType(context.buildFullName(name), tsi, indexAnalyzer, meta.getValue());
+            MatchOnlyTextFieldType ft = new MatchOnlyTextFieldType(
+                context.buildFullName(name),
+                tsi,
+                indexAnalyzer,
+                context.isSourceSynthetic(),
+                meta.getValue()
+            );
             return ft;
         }
@@ -148,10 +155,16 @@ public class MatchOnlyTextFieldMapper extends FieldMapper {
         private final Analyzer indexAnalyzer;
         private final TextFieldType textFieldType;

-        public MatchOnlyTextFieldType(String name, TextSearchInfo tsi, Analyzer indexAnalyzer, Map<String, String> meta) {
+        public MatchOnlyTextFieldType(
+            String name,
+            TextSearchInfo tsi,
+            Analyzer indexAnalyzer,
+            boolean isSyntheticSource,
+            Map<String, String> meta
+        ) {
             super(name, true, false, false, tsi, meta);
             this.indexAnalyzer = Objects.requireNonNull(indexAnalyzer);
-            this.textFieldType = new TextFieldType(name);
+            this.textFieldType = new TextFieldType(name, isSyntheticSource);
         }

         public MatchOnlyTextFieldType(String name) {
@@ -159,6 +172,7 @@ public class MatchOnlyTextFieldMapper extends FieldMapper {
                 name,
                 new TextSearchInfo(Defaults.FIELD_TYPE, null, Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER),
                 Lucene.STANDARD_ANALYZER,
+                false,
                 Collections.emptyMap()
             );
         }
@@ -305,7 +319,21 @@ public class MatchOnlyTextFieldMapper extends FieldMapper {
         @Override
         public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) {
-            if (fieldDataContext.fielddataOperation() == FielddataOperation.SCRIPT) {
+            if (fieldDataContext.fielddataOperation() != FielddataOperation.SCRIPT) {
+                throw new IllegalArgumentException(CONTENT_TYPE + " fields do not support sorting and aggregations");
+            }
+            if (textFieldType.isSyntheticSource()) {
+                return (cache, breaker) -> new StoredFieldSortedBinaryIndexFieldData(
+                    storedFieldNameForSyntheticSource(),
+                    CoreValuesSourceType.KEYWORD,
+                    TextDocValuesField::new
+                ) {
+                    @Override
+                    protected BytesRef storedToBytesRef(Object stored) {
+                        return new BytesRef((String) stored);
+                    }
+                };
+            }
             return new SourceValueFetcherSortedBinaryIndexFieldData.Builder(
                 name(),
                 CoreValuesSourceType.KEYWORD,
@@ -315,9 +343,6 @@ public class MatchOnlyTextFieldMapper extends FieldMapper {
             );
         }

-            throw new IllegalArgumentException(CONTENT_TYPE + " fields do not support sorting and aggregations");
-        }
-
         private String storedFieldNameForSyntheticSource() {
             return name() + "._original";
         }


@@ -247,6 +247,16 @@ public class MatchOnlyTextFieldMapperTests extends MapperTestCase {
         }
     }

+    public void testDocValues() throws IOException {
+        MapperService mapper = createMapperService(fieldMapping(b -> b.field("type", "match_only_text")));
+        assertScriptDocValues(mapper, "foo", equalTo(List.of("foo")));
+    }
+
+    public void testDocValuesLoadedFromSynthetic() throws IOException {
+        MapperService mapper = createMapperService(syntheticSourceFieldMapping(b -> b.field("type", "match_only_text")));
+        assertScriptDocValues(mapper, "foo", equalTo(List.of("foo")));
+    }
+
     @Override
     protected IngestScriptSupport ingestScriptSupport() {
         throw new AssumptionViolatedException("not supported");


@@ -121,7 +121,13 @@ public class AnnotatedTextFieldMapper extends FieldMapper {
                 wrapAnalyzer(analyzers.getSearchAnalyzer()),
                 wrapAnalyzer(analyzers.getSearchQuoteAnalyzer())
             );
-            return new AnnotatedTextFieldType(context.buildFullName(name), store.getValue(), tsi, meta.getValue());
+            return new AnnotatedTextFieldType(
+                context.buildFullName(name),
+                store.getValue(),
+                tsi,
+                context.isSourceSynthetic(),
+                meta.getValue()
+            );
         }

         @Override
@@ -467,8 +473,14 @@ public class AnnotatedTextFieldMapper extends FieldMapper {
     public static final class AnnotatedTextFieldType extends TextFieldMapper.TextFieldType {

-        private AnnotatedTextFieldType(String name, boolean store, TextSearchInfo tsi, Map<String, String> meta) {
-            super(name, true, store, tsi, meta);
+        private AnnotatedTextFieldType(
+            String name,
+            boolean store,
+            TextSearchInfo tsi,
+            boolean isSyntheticSource,
+            Map<String, String> meta
+        ) {
+            super(name, true, store, tsi, isSyntheticSource, meta);
         }

         public AnnotatedTextFieldType(String name, Map<String, String> meta) {


@@ -0,0 +1,108 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.index.fielddata;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader;
import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
import org.elasticsearch.script.field.DocValuesScriptFieldFactory;
import org.elasticsearch.script.field.ToScriptFieldFactory;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;

import java.util.Set;

/**
 * Per segment values for a field loaded from stored fields.
 */
public abstract class StoredFieldIndexFieldData<T> implements IndexFieldData<StoredFieldIndexFieldData<T>.StoredFieldLeafFieldData> {
    private final String fieldName;
    private final ValuesSourceType valuesSourceType;
    protected final ToScriptFieldFactory<T> toScriptFieldFactory;
    protected final StoredFieldLoader loader;

    protected StoredFieldIndexFieldData(String fieldName, ValuesSourceType valuesSourceType, ToScriptFieldFactory<T> toScriptFieldFactory) {
        this.fieldName = fieldName;
        this.valuesSourceType = valuesSourceType;
        this.toScriptFieldFactory = toScriptFieldFactory;
        this.loader = StoredFieldLoader.create(false, Set.of(fieldName));
    }

    @Override
    public String getFieldName() {
        return fieldName;
    }

    @Override
    public ValuesSourceType getValuesSourceType() {
        return valuesSourceType;
    }

    @Override
    public final StoredFieldLeafFieldData load(LeafReaderContext context) {
        return loadDirect(context);
    }

    @Override
    public final StoredFieldLeafFieldData loadDirect(LeafReaderContext context) {
        return new StoredFieldLeafFieldData(loader.getLoader(context, null));
    }

    protected abstract T loadLeaf(LeafStoredFieldLoader leafStoredFieldLoader);

    @Override
    public SortField sortField(Object missingValue, MultiValueMode sortMode, XFieldComparatorSource.Nested nested, boolean reverse) {
        throw new IllegalArgumentException("not supported for stored field fallback");
    }

    @Override
    public BucketedSort newBucketedSort(
        BigArrays bigArrays,
        Object missingValue,
        MultiValueMode sortMode,
        XFieldComparatorSource.Nested nested,
        SortOrder sortOrder,
        DocValueFormat format,
        int bucketSize,
        BucketedSort.ExtraData extra
    ) {
        throw new IllegalArgumentException("not supported for stored field fallback");
    }

    public class StoredFieldLeafFieldData implements LeafFieldData {
        private final LeafStoredFieldLoader loader;

        protected StoredFieldLeafFieldData(LeafStoredFieldLoader loader) {
            this.loader = loader;
        }

        @Override
        public DocValuesScriptFieldFactory getScriptFieldFactory(String name) {
            return toScriptFieldFactory.getScriptFieldFactory(loadLeaf(loader), fieldName);
        }

        @Override
        public long ramBytesUsed() {
            return 0;
        }

        @Override
        public void close() {}

        @Override
        public SortedBinaryDocValues getBytesValues() {
            throw new IllegalArgumentException("not supported for source fallback");
        }
    }
}


@@ -0,0 +1,82 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.index.fielddata;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader;
import org.elasticsearch.script.field.ToScriptFieldFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Per segment values for a field loaded from stored fields exposing {@link SortedBinaryDocValues}.
 */
public abstract class StoredFieldSortedBinaryIndexFieldData extends StoredFieldIndexFieldData<SortedBinaryDocValues> {
    protected StoredFieldSortedBinaryIndexFieldData(
        String fieldName,
        ValuesSourceType valuesSourceType,
        ToScriptFieldFactory<SortedBinaryDocValues> toScriptFieldFactory
    ) {
        super(fieldName, valuesSourceType, toScriptFieldFactory);
    }

    @Override
    protected SourceValueFetcherSortedBinaryDocValues loadLeaf(LeafStoredFieldLoader leafStoredFieldLoader) {
        return new SourceValueFetcherSortedBinaryDocValues(leafStoredFieldLoader);
    }

    protected abstract BytesRef storedToBytesRef(Object stored);

    class SourceValueFetcherSortedBinaryDocValues extends SortedBinaryDocValues {
        private final LeafStoredFieldLoader loader;

        private final List<BytesRef> sorted = new ArrayList<>();
        private int current;
        private int docValueCount;

        SourceValueFetcherSortedBinaryDocValues(LeafStoredFieldLoader loader) {
            this.loader = loader;
        }

        @Override
        public boolean advanceExact(int doc) throws IOException {
            loader.advanceTo(doc);
            List<Object> values = loader.storedFields().get(getFieldName());
            if (values == null || values.isEmpty()) {
                current = 0;
                docValueCount = 0;
                return false;
            }
            sorted.clear();
            for (Object o : values) {
                sorted.add(storedToBytesRef(o));
            }
            Collections.sort(sorted);
            current = 0;
            docValueCount = sorted.size();
            return true;
        }

        @Override
        public int docValueCount() {
            return docValueCount;
        }

        @Override
        public BytesRef nextValue() throws IOException {
            assert current < docValueCount;
            return sorted.get(current++);
        }
    }
}
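To see how this class is meant to be used, here is a sketch of the wiring that the `keyword`, `text`, and `match_only_text` mapper changes in this commit all follow (the field name is hypothetical; for `keyword` the stored value is already a `BytesRef`, so its override casts instead of wrapping):

```java
// Sketch: script field data that loads the stored field "my_field" and sorts
// its values per document, as the mapper changes in this commit do.
static IndexFieldData.Builder storedFieldFallback() {
    return (cache, breaker) -> new StoredFieldSortedBinaryIndexFieldData(
        "my_field",
        CoreValuesSourceType.KEYWORD,
        TextDocValuesField::new // adapts the loaded values to the script `field` API
    ) {
        @Override
        protected BytesRef storedToBytesRef(Object stored) {
            return new BytesRef((String) stored); // the text mappers store plain Strings
        }
    };
}
```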


@@ -47,6 +47,7 @@ import org.elasticsearch.index.fielddata.FieldData;
 import org.elasticsearch.index.fielddata.FieldDataContext;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.SourceValueFetcherSortedBinaryIndexFieldData;
+import org.elasticsearch.index.fielddata.StoredFieldSortedBinaryIndexFieldData;
 import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData;
 import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.index.similarity.SimilarityProvider;
@@ -294,7 +295,15 @@ public final class KeywordFieldMapper extends FieldMapper {
             } else if (splitQueriesOnWhitespace.getValue()) {
                 searchAnalyzer = Lucene.WHITESPACE_ANALYZER;
             }
-            return new KeywordFieldType(context.buildFullName(name), fieldType, normalizer, searchAnalyzer, quoteAnalyzer, this);
+            return new KeywordFieldType(
+                context.buildFullName(name),
+                fieldType,
+                normalizer,
+                searchAnalyzer,
+                quoteAnalyzer,
+                this,
+                context.isSourceSynthetic()
+            );
         }

         @Override
@@ -334,6 +343,7 @@ public final class KeywordFieldMapper extends FieldMapper {
         private final boolean eagerGlobalOrdinals;
         private final FieldValues<String> scriptValues;
         private final boolean isDimension;
+        private final boolean isSyntheticSource;

         public KeywordFieldType(
             String name,
@@ -341,7 +351,8 @@ public final class KeywordFieldMapper extends FieldMapper {
             NamedAnalyzer normalizer,
             NamedAnalyzer searchAnalyzer,
             NamedAnalyzer quoteAnalyzer,
-            Builder builder
+            Builder builder,
+            boolean isSyntheticSource
         ) {
             super(
                 name,
@@ -357,6 +368,7 @@ public final class KeywordFieldMapper extends FieldMapper {
             this.nullValue = builder.nullValue.getValue();
             this.scriptValues = builder.scriptValues();
             this.isDimension = builder.dimension.getValue();
+            this.isSyntheticSource = isSyntheticSource;
         }

         public KeywordFieldType(String name, boolean isIndexed, boolean hasDocValues, Map<String, String> meta) {
@@ -367,6 +379,7 @@ public final class KeywordFieldMapper extends FieldMapper {
             this.eagerGlobalOrdinals = false;
             this.scriptValues = null;
             this.isDimension = false;
+            this.isSyntheticSource = false;
         }

         public KeywordFieldType(String name) {
@@ -388,6 +401,7 @@ public final class KeywordFieldMapper extends FieldMapper {
             this.eagerGlobalOrdinals = false;
             this.scriptValues = null;
             this.isDimension = false;
+            this.isSyntheticSource = false;
         }

         public KeywordFieldType(String name, NamedAnalyzer analyzer) {
@@ -398,6 +412,7 @@ public final class KeywordFieldMapper extends FieldMapper {
             this.eagerGlobalOrdinals = false;
             this.scriptValues = null;
             this.isDimension = false;
+            this.isSyntheticSource = false;
         }

         @Override
@@ -686,9 +701,46 @@ public final class KeywordFieldMapper extends FieldMapper {
             if (operation == FielddataOperation.SEARCH) {
                 failIfNoDocValues();
+                return fieldDataFromDocValues();
+            }
+            if (operation != FielddataOperation.SCRIPT) {
+                throw new IllegalStateException("unknown operation [" + operation.name() + "]");
             }
-            if ((operation == FielddataOperation.SEARCH || operation == FielddataOperation.SCRIPT) && hasDocValues()) {
+            if (hasDocValues()) {
+                return fieldDataFromDocValues();
+            }
+            if (isSyntheticSource) {
+                if (false == isStored()) {
+                    throw new IllegalStateException(
+                        "keyword field ["
+                            + name()
+                            + "] is only supported in synthetic _source index if it creates doc values or stored fields"
+                    );
+                }
+                return (cache, breaker) -> new StoredFieldSortedBinaryIndexFieldData(
+                    name(),
+                    CoreValuesSourceType.KEYWORD,
+                    KeywordDocValuesField::new
+                ) {
+                    @Override
+                    protected BytesRef storedToBytesRef(Object stored) {
+                        return (BytesRef) stored;
+                    }
+                };
+            }
+            Set<String> sourcePaths = fieldDataContext.sourcePathsLookup().apply(name());
+            return new SourceValueFetcherSortedBinaryIndexFieldData.Builder(
+                name(),
+                CoreValuesSourceType.KEYWORD,
+                sourceValueFetcher(sourcePaths),
+                fieldDataContext.lookupSupplier().get().source(),
+                KeywordDocValuesField::new
+            );
+        }
+
+        private SortedSetOrdinalsIndexFieldData.Builder fieldDataFromDocValues() {
             return new SortedSetOrdinalsIndexFieldData.Builder(
                 name(),
                 CoreValuesSourceType.KEYWORD,
@@ -696,22 +748,6 @@ public final class KeywordFieldMapper extends FieldMapper {
             );
         }

-            if (operation == FielddataOperation.SCRIPT) {
-                SearchLookup searchLookup = fieldDataContext.lookupSupplier().get();
-                Set<String> sourcePaths = fieldDataContext.sourcePathsLookup().apply(name());
-                return new SourceValueFetcherSortedBinaryIndexFieldData.Builder(
-                    name(),
-                    CoreValuesSourceType.KEYWORD,
-                    sourceValueFetcher(sourcePaths),
-                    searchLookup.source(),
-                    KeywordDocValuesField::new
-                );
-            }
-
-            throw new IllegalStateException("unknown field data type [" + operation.name() + "]");
-        }
-
         @Override
         public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
             if (format != null) {
@@ -1112,6 +1148,7 @@ public final class KeywordFieldMapper extends FieldMapper {
                 fieldType().ignoreAbove == Defaults.IGNORE_ABOVE ? null : originalName(),
                 false
             ) {
+
                 @Override
                 protected BytesRef convert(BytesRef value) {
                     return value;
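Summing up the new control flow for scripts in `fielddataBuilder` above, as a plain-Java sketch with no Elasticsearch types (the method is illustrative only, not part of the change):

```java
// Illustrative only: the fallback order a keyword field now applies when a
// script asks for field data.
static String keywordScriptFieldDataSource(boolean hasDocValues, boolean syntheticSource, boolean stored) {
    if (hasDocValues) {
        return "doc_values"; // unchanged fast path
    }
    if (syntheticSource) {
        if (stored == false) {
            // corresponds to the IllegalStateException thrown above
            throw new IllegalStateException("keyword under synthetic _source needs doc values or stored fields");
        }
        return "stored_fields"; // new in this commit
    }
    return "_source"; // pre-existing fetch-from-_source fallback
}
```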


@@ -174,8 +174,8 @@ public abstract class MappedFieldType {
         return null;
     }

-    /** Returns true if the field is aggregatable.
-     *
+    /**
+     * Returns true if the field is aggregatable.
      */
     public boolean isAggregatable() {
         return hasDocValues();


@@ -61,6 +61,7 @@ import org.elasticsearch.index.fielddata.FieldDataContext;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.fielddata.SourceValueFetcherSortedBinaryIndexFieldData;
+import org.elasticsearch.index.fielddata.StoredFieldSortedBinaryIndexFieldData;
 import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
 import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.index.similarity.SimilarityProvider;
@@ -351,7 +352,14 @@ public class TextFieldMapper extends FieldMapper {
                 ft = new LegacyTextFieldType(context.buildFullName(name), index.getValue(), store.getValue(), tsi, meta.getValue());
                 // ignore fieldData and eagerGlobalOrdinals
             } else {
-                ft = new TextFieldType(context.buildFullName(name), index.getValue(), store.getValue(), tsi, meta.getValue());
+                ft = new TextFieldType(
+                    context.buildFullName(name),
+                    index.getValue(),
+                    store.getValue(),
+                    tsi,
+                    context.isSourceSynthetic(),
+                    meta.getValue()
+                );
                 ft.eagerGlobalOrdinals = eagerGlobalOrdinals.getValue();
                 if (fieldData.getValue()) {
                     ft.setFielddata(true, freqFilter.getValue());
@@ -647,10 +655,19 @@ public class TextFieldMapper extends FieldMapper {
         private PrefixFieldType prefixFieldType;
         private boolean indexPhrases = false;
         private boolean eagerGlobalOrdinals = false;
+        private final boolean isSyntheticSource;

-        public TextFieldType(String name, boolean indexed, boolean stored, TextSearchInfo tsi, Map<String, String> meta) {
+        public TextFieldType(
+            String name,
+            boolean indexed,
+            boolean stored,
+            TextSearchInfo tsi,
+            boolean isSyntheticSource,
+            Map<String, String> meta
+        ) {
             super(name, indexed, stored, false, tsi, meta);
             fielddata = false;
+            this.isSyntheticSource = isSyntheticSource;
         }

         public TextFieldType(String name, boolean indexed, boolean stored, Map<String, String> meta) {
@@ -663,14 +680,16 @@ public class TextFieldMapper extends FieldMapper {
                 meta
             );
             fielddata = false;
+            isSyntheticSource = false;
         }

-        public TextFieldType(String name) {
+        public TextFieldType(String name, boolean isSyntheticSource) {
             this(
                 name,
                 true,
                 false,
                 new TextSearchInfo(Defaults.FIELD_TYPE, null, Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER),
+                isSyntheticSource,
                 Collections.emptyMap()
             );
         }
@@ -902,16 +921,7 @@ public class TextFieldMapper extends FieldMapper {
         @Override
         public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) {
             FielddataOperation operation = fieldDataContext.fielddataOperation();
-            if (operation == FielddataOperation.SCRIPT) {
-                return new SourceValueFetcherSortedBinaryIndexFieldData.Builder(
-                    name(),
-                    CoreValuesSourceType.KEYWORD,
-                    SourceValueFetcher.toString(fieldDataContext.sourcePathsLookup().apply(name())),
-                    fieldDataContext.lookupSupplier().get().source(),
-                    TextDocValuesField::new
-                );
-            } else if (operation == FielddataOperation.SEARCH) {
+            if (operation == FielddataOperation.SEARCH) {
                 if (fielddata == false) {
                     throw new IllegalArgumentException(
                         "Fielddata is disabled on ["
@@ -939,14 +949,53 @@ public class TextFieldMapper extends FieldMapper {
                 );
             }

+            if (operation != FielddataOperation.SCRIPT) {
                 throw new IllegalStateException("unknown field data operation [" + operation.name() + "]");
             }
+
+            if (isSyntheticSource) {
+                if (isStored()) {
+                    return (cache, breaker) -> new StoredFieldSortedBinaryIndexFieldData(
+                        name(),
+                        CoreValuesSourceType.KEYWORD,
+                        TextDocValuesField::new
+                    ) {
+                        @Override
+                        protected BytesRef storedToBytesRef(Object stored) {
+                            return new BytesRef((String) stored);
+                        }
+                    };
+                }
+                throw new IllegalArgumentException(
+                    "fetching values from a text field ["
+                        + name()
+                        + "] is not yet supported because synthetic _source is enabled and the field doesn't create stored fields"
+                );
+            }
+
+            return new SourceValueFetcherSortedBinaryIndexFieldData.Builder(
+                name(),
+                CoreValuesSourceType.KEYWORD,
+                SourceValueFetcher.toString(fieldDataContext.sourcePathsLookup().apply(name())),
+                fieldDataContext.lookupSupplier().get().source(),
+                TextDocValuesField::new
+            );
+        }
+
+        public boolean isSyntheticSource() {
+            return isSyntheticSource;
+        }
     }

     public static class ConstantScoreTextFieldType extends TextFieldType {

-        public ConstantScoreTextFieldType(String name, boolean indexed, boolean stored, TextSearchInfo tsi, Map<String, String> meta) {
-            super(name, indexed, stored, tsi, meta);
+        public ConstantScoreTextFieldType(
+            String name,
+            boolean indexed,
+            boolean stored,
+            TextSearchInfo tsi,
+            boolean isSyntheticSource,
+            Map<String, String> meta
+        ) {
+            super(name, indexed, stored, tsi, isSyntheticSource, meta);
         }

         public ConstantScoreTextFieldType(String name) {
@@ -955,6 +1004,7 @@ public class TextFieldMapper extends FieldMapper {
                 true,
                 false,
                 new TextSearchInfo(Defaults.FIELD_TYPE, null, Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER),
+                false,
                 Collections.emptyMap()
             );
         }
@@ -965,6 +1015,7 @@ public class TextFieldMapper extends FieldMapper {
                 indexed,
                 stored,
                 new TextSearchInfo(Defaults.FIELD_TYPE, null, Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER),
+                false,
                 meta
             );
         }
@@ -1020,7 +1071,7 @@ public class TextFieldMapper extends FieldMapper {
         private final MappedFieldType existQueryFieldType;

         LegacyTextFieldType(String name, boolean indexed, boolean stored, TextSearchInfo tsi, Map<String, String> meta) {
-            super(name, indexed, stored, tsi, meta);
+            super(name, indexed, stored, tsi, false, meta);
             // norms are not available, neither are doc-values, so fall back to _source to run exists query
             existQueryFieldType = KeywordScriptFieldType.sourceOnly(name()).asMappedFieldTypes().findFirst().get();
         }


@@ -226,7 +226,7 @@ public class SourceLookup implements Source, Map<String, Object> {
         @Override
         public void setSegmentAndDocument(LeafReaderContext context, int docId) {
-            //
+            // nothing to do
         }
     }


@@ -189,7 +189,13 @@ public abstract class AbstractScriptFieldTypeTestCase extends MapperServiceTestC
     }

     protected static FieldDataContext mockFielddataContext() {
-        return new FieldDataContext("test", mockContext()::lookup, mockContext()::sourcePath, MappedFieldType.FielddataOperation.SCRIPT);
+        SearchExecutionContext searchExecutionContext = mockContext();
+        return new FieldDataContext(
+            "test",
+            searchExecutionContext::lookup,
+            mockContext()::sourcePath,
+            MappedFieldType.FielddataOperation.SCRIPT
+        );
     }

     protected static SearchExecutionContext mockContext(boolean allowExpensiveQueries) {


@@ -53,7 +53,7 @@ public class IgnoredFieldMapperTests extends MapperServiceTestCase {
             iw -> {
                 SearchLookup lookup = new SearchLookup(
                     mapperService::fieldType,
-                    fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
+                    fieldDataLookup(mapperService),
                     new SourceLookup.ReaderSourceProvider()
                 );
                 SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);


@@ -49,7 +49,7 @@ public class IndexFieldMapperTests extends MapperServiceTestCase {
             IndexFieldMapper.IndexFieldType ft = (IndexFieldMapper.IndexFieldType) mapperService.fieldType("_index");
             SearchLookup lookup = new SearchLookup(
                 mapperService::fieldType,
-                fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
+                fieldDataLookup(mapperService),
                 new SourceLookup.ReaderSourceProvider()
             );
             SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService);


@@ -768,4 +768,21 @@ public class KeywordFieldMapperTests extends MapperTestCase {
         assertThat(service.fieldType("mykeyw"), instanceOf(KeywordFieldMapper.KeywordFieldType.class));
         assertNotEquals(Lucene.KEYWORD_ANALYZER, ((KeywordFieldMapper.KeywordFieldType) service.fieldType("mykeyw")).normalizer());
     }
+
+    public void testDocValues() throws IOException {
+        MapperService mapper = createMapperService(fieldMapping(b -> b.field("type", "keyword")));
+        assertScriptDocValues(mapper, "foo", equalTo(List.of("foo")));
+    }
+
+    public void testDocValuesLoadedFromSource() throws IOException {
+        MapperService mapper = createMapperService(fieldMapping(b -> b.field("type", "keyword").field("doc_values", false)));
+        assertScriptDocValues(mapper, "foo", equalTo(List.of("foo")));
+    }
+
+    public void testDocValuesLoadedFromStoredSynthetic() throws IOException {
+        MapperService mapper = createMapperService(
+            syntheticSourceFieldMapping(b -> b.field("type", "keyword").field("doc_values", false).field("store", true))
+        );
+        assertScriptDocValues(mapper, "foo", equalTo(List.of("foo")));
+    }
 }


@@ -64,7 +64,7 @@ public class PlaceHolderFieldMapperTests extends MapperServiceTestCase {
         }, iw -> {
             SearchLookup lookup = new SearchLookup(
                 mapperService::fieldType,
-                fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
+                fieldDataLookup(mapperService),
                 new SourceLookup.ReaderSourceProvider()
             );
             SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService);


@@ -50,7 +50,6 @@ public class ProvidedIdFieldMapperTests extends MapperServiceTestCase {
     }

-
     public void testEnableFieldData() throws IOException {
         boolean[] enabled = new boolean[1];
         MapperService mapperService = createMapperService(() -> enabled[0], mapping(b -> {}));
@@ -78,7 +77,7 @@ public class ProvidedIdFieldMapperTests extends MapperServiceTestCase {
             iw -> {
                 SearchLookup lookup = new SearchLookup(
                     mapperService::fieldType,
-                    fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
+                    fieldDataLookup(mapperService),
                     new SourceLookup.ReaderSourceProvider()
                 );
                 SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);


@@ -71,7 +71,7 @@ public class RoutingFieldMapperTests extends MetadataMapperTestCase {
             iw -> {
                 SearchLookup lookup = new SearchLookup(
                     mapperService::fieldType,
-                    fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
+                    fieldDataLookup(mapperService),
                     new SourceLookup.ReaderSourceProvider()
                 );
                 SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);


@@ -1227,4 +1227,43 @@ public class TextFieldMapperTests extends MapperTestCase {
         mapperService = createMapperService(Version.fromString("5.0.0"), mapping);
         assertFalse(((TextFieldMapper) mapperService.documentMapper().mappers().getMapper("field")).fieldType().eagerGlobalOrdinals());
     }
+
+    public void testDocValues() throws IOException {
+        MapperService mapper = createMapperService(fieldMapping(b -> b.field("type", "text")));
+        for (String input : new String[] {
+            "foo", // Won't be tokenized
+            "foo bar", // Will be tokenized. But script doc values still returns the whole field.
+        }) {
+            assertScriptDocValues(mapper, input, equalTo(List.of(input)));
+        }
+    }
+
+    public void testDocValuesLoadedFromStoredSynthetic() throws IOException {
+        MapperService mapper = createMapperService(syntheticSourceFieldMapping(b -> b.field("type", "text").field("store", true)));
+        for (String input : new String[] {
+            "foo", // Won't be tokenized
+            "foo bar", // Will be tokenized. But script doc values still returns the whole field.
+        }) {
+            assertScriptDocValues(mapper, input, equalTo(List.of(input)));
+        }
+    }
+
+    public void testDocValuesLoadedFromSubKeywordSynthetic() throws IOException {
+        MapperService mapper = createMapperService(syntheticSourceFieldMapping(b -> {
+            b.field("type", "text");
+            b.startObject("fields");
+            {
+                b.startObject("raw").field("type", "keyword").endObject();
+            }
+            b.endObject();
+        }));
+        Exception e = expectThrows(IllegalArgumentException.class, () -> assertScriptDocValues(mapper, "foo", equalTo(List.of("foo"))));
+        assertThat(
+            e.getMessage(),
+            equalTo(
+                "fetching values from a text field [field] is not yet supported because synthetic _source is "
+                    + "enabled and the field doesn't create stored fields"
+            )
+        );
+    }
 }


@@ -45,7 +45,7 @@ import static org.hamcrest.Matchers.equalTo;
 public class TextFieldTypeTests extends FieldTypeTestCase {

     private static TextFieldType createFieldType() {
-        return new TextFieldType("field");
+        return new TextFieldType("field", randomBoolean());
     }

     public void testIsAggregatableDependsOnFieldData() {


@@ -53,7 +53,7 @@ public class VersionFieldMapperTests extends MapperServiceTestCase {
             VersionFieldMapper.VersionFieldType ft = (VersionFieldMapper.VersionFieldType) mapperService.fieldType("_version");
             SearchLookup lookup = new SearchLookup(
                 mapperService::fieldType,
-                fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
+                fieldDataLookup(mapperService),
                 new SourceLookup.ReaderSourceProvider()
             );
             SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService);


@@ -104,7 +104,7 @@ public class SearchExecutionContextTests extends ESTestCase {
     public void testFailIfFieldMappingNotFound() {
         SearchExecutionContext context = createSearchExecutionContext(IndexMetadata.INDEX_UUID_NA_VALUE, null);
         context.setAllowUnmappedFields(false);
-        MappedFieldType fieldType = new TextFieldMapper.TextFieldType("text");
+        MappedFieldType fieldType = new TextFieldMapper.TextFieldType("text", randomBoolean());
         MappedFieldType result = context.failIfFieldMappingNotFound("name", fieldType);
         assertThat(result, sameInstance(fieldType));
         QueryShardException e = expectThrows(QueryShardException.class, () -> context.failIfFieldMappingNotFound("name", null));


@@ -431,7 +431,7 @@ public class FiltersAggregatorTests extends AggregatorTestCase {
      * Tests a filter that needs the cache to be fast.
      */
     public void testPhraseFilter() throws IOException {
-        MappedFieldType ft = new TextFieldMapper.TextFieldType("test");
+        MappedFieldType ft = new TextFieldMapper.TextFieldType("test", randomBoolean());
         AggregationBuilder builder = new FiltersAggregationBuilder(
             "test",
             new KeyedFilter("q1", new MatchPhraseQueryBuilder("test", "will find me").slop(0))


@@ -41,7 +41,7 @@ public class SamplerAggregatorTests extends AggregatorTestCase {
      * Uses the sampler aggregation to find the minimum value of a field out of the top 3 scoring documents in a search.
      */
     public void testSampler() throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
         MappedFieldType numericFieldType = new NumberFieldMapper.NumberFieldType("int", NumberFieldMapper.NumberType.LONG);

         IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
@@ -76,7 +76,7 @@ public class SamplerAggregatorTests extends AggregatorTestCase {
     }

     public void testRidiculousSize() throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
         MappedFieldType numericFieldType = new NumberFieldMapper.NumberFieldType("int", NumberFieldMapper.NumberType.LONG);

         IndexWriterConfig indexWriterConfig = newIndexWriterConfig();

View file

@@ -103,7 +103,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testSignificance(SignificanceHeuristic heuristic) throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
         textFieldType.setFielddata(true);
 
         IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer());
@@ -211,7 +211,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
      * @throws IOException on test setup failure
      */
     public void testSamplingConsistency() throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
         textFieldType.setFielddata(true);
 
         IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer());
@@ -317,7 +317,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
      * Uses the significant terms aggregation on an index with unmapped field
      */
    public void testUnmapped() throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
         textFieldType.setFielddata(true);
 
         IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer());
@@ -384,7 +384,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testFieldAlias() throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
         textFieldType.setFielddata(true);
 
         IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer());
@@ -440,7 +440,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testFieldBackground() throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
         textFieldType.setFielddata(true);
 
         IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer());

View file

@@ -71,7 +71,7 @@ public class SignificantTextAggregatorTests extends AggregatorTestCase {
      * Uses the significant text aggregation to find the keywords in text fields
      */
     public void testSignificance() throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
 
         IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer());
         indexWriterConfig.setMaxBufferedDocs(100);
@@ -126,7 +126,7 @@ public class SignificantTextAggregatorTests extends AggregatorTestCase {
      * Uses the significant text aggregation to find the keywords in text fields and include/exclude selected terms
      */
     public void testIncludeExcludes() throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
 
         IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer());
         indexWriterConfig.setMaxBufferedDocs(100);
@@ -187,7 +187,7 @@ public class SignificantTextAggregatorTests extends AggregatorTestCase {
     }
 
     public void testMissingField() throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
 
         IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
         indexWriterConfig.setMaxBufferedDocs(100);
@@ -215,7 +215,7 @@ public class SignificantTextAggregatorTests extends AggregatorTestCase {
     }
 
     public void testFieldAlias() throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
 
         IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer());
         indexWriterConfig.setMaxBufferedDocs(100);
@@ -274,7 +274,7 @@ public class SignificantTextAggregatorTests extends AggregatorTestCase {
     }
 
     public void testInsideTermsAgg() throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
 
         IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer());
         indexWriterConfig.setMaxBufferedDocs(100);
@@ -331,7 +331,7 @@ public class SignificantTextAggregatorTests extends AggregatorTestCase {
      * Test documents with arrays of text
      */
     public void testSignificanceOnTextArrays() throws IOException {
-        TextFieldType textFieldType = new TextFieldType("text");
+        TextFieldType textFieldType = new TextFieldType("text", randomBoolean());
 
         IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer());
         indexWriterConfig.setMaxBufferedDocs(100);

View file

@@ -23,6 +23,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperBuilderContext;
+import org.elasticsearch.index.mapper.MappingLookup;
 import org.elasticsearch.index.mapper.NestedLookup;
 import org.elasticsearch.index.mapper.NestedObjectMapper;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
@@ -48,7 +49,6 @@ import org.elasticsearch.xcontent.XContentParser;
 import org.elasticsearch.xcontent.XContentType;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.mockito.Mockito;
 
 import java.io.IOException;
 import java.util.Collections;
@@ -60,6 +60,7 @@ import java.util.function.Function;
 import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
 import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
+import static org.mockito.Mockito.mock;
 
 public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends ESTestCase {
@@ -187,7 +188,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST
             index,
             Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build()
         );
-        BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, Mockito.mock(BitsetFilterCache.Listener.class));
+        BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, mock(BitsetFilterCache.Listener.class));
         BiFunction<MappedFieldType, FieldDataContext, IndexFieldData<?>> indexFieldDataLookup = (fieldType, fdc) -> {
             IndexFieldData.Builder builder = fieldType.fielddataBuilder(fdc);
             return builder.build(new IndexFieldDataCache.None(), null);
@@ -202,7 +203,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST
             bitsetFilterCache,
             indexFieldDataLookup,
             null,
-            null,
+            MappingLookup.EMPTY,
             null,
             scriptService,
             parserConfig(),
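
Note the `MappingLookup.EMPTY` argument: `SearchExecutionContext` used to tolerate `null` in this slot, but with the `field` API now resolving source paths through the mapping lookup, the test fixtures supply an empty lookup instead. `WildcardFieldMapperTests` below makes the same change via `MappingLookup.fromMapping(Mapping.EMPTY)`.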

View file

@@ -796,7 +796,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
         assertTrue(context.contains("category1"));
 
         document = new LuceneDocument();
-        TextFieldMapper.TextFieldType text = new TextFieldMapper.TextFieldType("category");
+        TextFieldMapper.TextFieldType text = new TextFieldMapper.TextFieldType("category", randomBoolean());
         document.add(new Field(text.name(), "category1", TextFieldMapper.Defaults.FIELD_TYPE));
         // Ignore stored field
         document.add(new StoredField(text.name(), "category1", TextFieldMapper.Defaults.FIELD_TYPE));

View file

@@ -659,6 +659,12 @@ public abstract class MapperServiceTestCase extends ESTestCase {
         );
     }
 
+    protected TriFunction<MappedFieldType, Supplier<SearchLookup>, MappedFieldType.FielddataOperation, IndexFieldData<?>> fieldDataLookup(
+        MapperService mapperService
+    ) {
+        return fieldDataLookup(mapperService.mappingLookup()::sourcePaths);
+    }
+
     protected TriFunction<MappedFieldType, Supplier<SearchLookup>, MappedFieldType.FielddataOperation, IndexFieldData<?>> fieldDataLookup(
         Function<String, Set<String>> sourcePathsLookup
     ) {
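
The new overload is a convenience wrapper: it derives the source-path function from the `MapperService`, so call sites shrink to a single argument. Before and after at a typical call site:

    // before
    fieldDataLookup(mapperService.mappingLookup()::sourcePaths)
    // after
    fieldDataLookup(mapperService)
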
@@ -744,4 +750,13 @@ public abstract class MapperServiceTestCase extends ESTestCase {
             b.endObject();
         });
     }
+
+    protected final XContentBuilder syntheticSourceFieldMapping(CheckedConsumer<XContentBuilder, IOException> buildField)
+        throws IOException {
+        return syntheticSourceMapping(b -> {
+            b.startObject("field");
+            buildField.accept(b);
+            b.endObject();
+        });
+    }
 }
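
`syntheticSourceFieldMapping` builds on the existing `syntheticSourceMapping` helper, mapping a single field named `field`. A minimal usage sketch; the keyword settings are illustrative, not taken from this commit:

    MapperService mapperService = createMapperService(syntheticSourceFieldMapping(b -> {
        b.field("type", "keyword");
        b.field("doc_values", false);
        b.field("store", true);
    }));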

View file

@@ -34,7 +34,9 @@ import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.core.CheckedConsumer;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.fielddata.FieldDataContext;
+import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
+import org.elasticsearch.index.fielddata.LeafFieldData;
 import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader;
 import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
 import org.elasticsearch.index.query.SearchExecutionContext;
@@ -507,7 +509,7 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
             iw -> {
                 SearchLookup lookup = new SearchLookup(
                     mapperService::fieldType,
-                    fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
+                    fieldDataLookup(mapperService),
                     new SourceLookup.ReaderSourceProvider()
                 );
                 ValueFetcher valueFetcher = new DocValueFetcher(format, lookup.getForField(ft, MappedFieldType.FielddataOperation.SEARCH));
@@ -521,6 +523,26 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
         return result.get();
     }
 
+    protected final void assertScriptDocValues(MapperService mapperService, Object sourceValue, Matcher<List<?>> dvMatcher)
+        throws IOException {
+        withLuceneIndex(
+            mapperService,
+            iw -> { iw.addDocument(mapperService.documentMapper().parse(source(b -> b.field("field", sourceValue))).rootDoc()); },
+            iw -> {
+                IndexSearcher searcher = newSearcher(iw);
+                MappedFieldType ft = mapperService.fieldType("field");
+                SearchLookup searchLookup = new SearchLookup(null, null, mapperService.mappingLookup().getSourceProvider());
+                IndexFieldData<?> sfd = ft.fielddataBuilder(
+                    new FieldDataContext("", () -> searchLookup, Set::of, MappedFieldType.FielddataOperation.SCRIPT)
+                ).build(null, null);
+                LeafFieldData lfd = sfd.load(getOnlyLeafReader(searcher.getIndexReader()).getContext());
+                DocValuesScriptFieldFactory sff = lfd.getScriptFieldFactory("field");
+                sff.setNextDocId(0);
+                assertThat(sff.toScriptDocValues(), dvMatcher);
+            }
+        );
+    }
+
     private class UpdateCheck {
         final XContentBuilder init;
         final XContentBuilder update;
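
`assertScriptDocValues` indexes one document, builds fielddata in `FielddataOperation.SCRIPT` mode against a source-backed `SearchLookup`, and asserts what a script's doc values would observe. A hypothetical call from a mapper test; the mapping and expected values are invented for illustration:

    assertScriptDocValues(
        createMapperService(syntheticSourceFieldMapping(b -> b.field("type", "keyword").field("store", true))),
        "cat",
        equalTo(List.of("cat"))
    );
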
@@ -785,11 +807,7 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
         when(searchExecutionContext.isSourceEnabled()).thenReturn(true);
         when(searchExecutionContext.sourcePath(field)).thenReturn(Set.of(field));
         when(searchExecutionContext.getForField(ft, fdt)).thenAnswer(
-            inv -> fieldDataLookup(mapperService.mappingLookup()::sourcePaths).apply(
-                ft,
-                () -> { throw new UnsupportedOperationException(); },
-                fdt
-            )
+            inv -> fieldDataLookup(mapperService).apply(ft, () -> { throw new UnsupportedOperationException(); }, fdt)
         );
         ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format);
         ParsedDocument doc = mapperService.documentMapper().parse(source);

View file

@@ -116,7 +116,7 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testMissing() throws IOException {
-        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text");
+        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text", randomBoolean());
         fieldType.setFielddata(true);
 
         final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field(fieldType.name())
@@ -192,7 +192,7 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
 
     @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/47469")
     public void testSingleValuedFieldWithFormatter() throws IOException {
-        TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text");
+        TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text", randomBoolean());
         fieldType.setFielddata(true);
 
         StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field("text")
@@ -218,7 +218,7 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
     public void testNestedAggregation() throws IOException {
         MappedFieldType numericFieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER);
-        TextFieldMapper.TextFieldType textFieldType = new TextFieldMapper.TextFieldType("text");
+        TextFieldMapper.TextFieldType textFieldType = new TextFieldMapper.TextFieldType("text", randomBoolean());
         textFieldType.setFielddata(true);
 
         TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.NUMERIC)
@@ -273,7 +273,7 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testValueScriptSingleValuedField() throws IOException {
-        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text");
+        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text", randomBoolean());
         fieldType.setFielddata(true);
 
         final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field(fieldType.name())
@@ -295,7 +295,7 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testValueScriptMultiValuedField() throws IOException {
-        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text");
+        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text", randomBoolean());
         fieldType.setFielddata(true);
 
         final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field(fieldType.name())
@@ -322,7 +322,7 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testFieldScriptSingleValuedField() throws IOException {
-        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text");
+        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text", randomBoolean());
         fieldType.setFielddata(true);
 
         final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").script(
@@ -345,7 +345,7 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testFieldScriptMultiValuedField() throws IOException {
-        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text");
+        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text", randomBoolean());
         fieldType.setFielddata(true);
 
         final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").script(
@@ -377,7 +377,7 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
         CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
         Consumer<InternalStringStats> verify
     ) throws IOException {
-        TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text");
+        TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text", randomBoolean());
         fieldType.setFielddata(true);
 
         AggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field("text");

View file

@@ -537,7 +537,7 @@ public class TopMetricsAggregatorTests extends AggregatorTestCase {
     }
 
     private MappedFieldType textFieldType(String name) {
-        return new TextFieldMapper.TextFieldType(name);
+        return new TextFieldMapper.TextFieldType(name, randomBoolean());
     }
 
     private MappedFieldType geoPointFieldType(String name) {

View file

@@ -79,7 +79,7 @@ public class CategorizeTextAggregatorTests extends AggregatorTestCase {
             },
             new AggTestConfig(
                 new CategorizeTextAggregationBuilder("my_agg", TEXT_FIELD_NAME),
-                new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME),
+                new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME, randomBoolean()),
                 longField(NUMERIC_FIELD_NAME)
             )
         );
@@ -114,7 +114,13 @@ public class CategorizeTextAggregatorTests extends AggregatorTestCase {
             assertThat(((Max) result.getBuckets().get(1).getAggregations().get("max")).value(), equalTo(4.0));
             assertThat(((Min) result.getBuckets().get(1).getAggregations().get("min")).value(), equalTo(0.0));
             assertThat(((Avg) result.getBuckets().get(1).getAggregations().get("avg")).getValue(), equalTo(2.0));
-        }, new AggTestConfig(aggBuilder, new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME), longField(NUMERIC_FIELD_NAME)));
+        },
+            new AggTestConfig(
+                aggBuilder,
+                new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME, randomBoolean()),
+                longField(NUMERIC_FIELD_NAME)
+            )
+        );
     }
 
     public void testCategorizationWithMultiBucketSubAggs() throws Exception {
@@ -163,7 +169,13 @@ public class CategorizeTextAggregatorTests extends AggregatorTestCase {
            assertThat(histo.getBuckets().get(2).getDocCount(), equalTo(1L));
            assertThat(((Avg) histo.getBuckets().get(0).getAggregations().get("avg")).getValue(), equalTo(0.0));
            assertThat(((Avg) histo.getBuckets().get(2).getAggregations().get("avg")).getValue(), equalTo(4.0));
-        }, new AggTestConfig(aggBuilder, new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME), longField(NUMERIC_FIELD_NAME)));
+        },
+            new AggTestConfig(
+                aggBuilder,
+                new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME, randomBoolean()),
+                longField(NUMERIC_FIELD_NAME)
+            )
+        );
     }
 
     public void testCategorizationAsSubAgg() throws Exception {
@@ -245,7 +257,13 @@ public class CategorizeTextAggregatorTests extends AggregatorTestCase {
            assertThat(((Max) categorizationAggregation.getBuckets().get(1).getAggregations().get("max")).value(), equalTo(4.0));
            assertThat(((Min) categorizationAggregation.getBuckets().get(1).getAggregations().get("min")).value(), equalTo(4.0));
            assertThat(((Avg) categorizationAggregation.getBuckets().get(1).getAggregations().get("avg")).getValue(), equalTo(4.0));
-        }, new AggTestConfig(aggBuilder, new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME), longField(NUMERIC_FIELD_NAME)));
+        },
+            new AggTestConfig(
+                aggBuilder,
+                new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME, randomBoolean()),
+                longField(NUMERIC_FIELD_NAME)
+            )
+        );
     }
 
     public void testCategorizationWithSubAggsManyDocs() throws Exception {
@@ -294,7 +312,13 @@ public class CategorizeTextAggregatorTests extends AggregatorTestCase {
            assertThat(histo.getBuckets().get(2).getDocCount(), equalTo(5000L));
            assertThat(((Avg) histo.getBuckets().get(0).getAggregations().get("avg")).getValue(), equalTo(0.0));
            assertThat(((Avg) histo.getBuckets().get(2).getAggregations().get("avg")).getValue(), equalTo(4.0));
-        }, new AggTestConfig(aggBuilder, new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME), longField(NUMERIC_FIELD_NAME)));
+        },
+            new AggTestConfig(
+                aggBuilder,
+                new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME, randomBoolean()),
+                longField(NUMERIC_FIELD_NAME)
+            )
+        );
     }
 
     private static void writeTestDocs(RandomIndexWriter w) throws IOException {

View file

@@ -9,6 +9,7 @@ package org.elasticsearch.xpack.downsample;
 
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.MappingLookup;
 import org.elasticsearch.index.mapper.TimeSeriesParams;
 
 import java.io.IOException;
@@ -29,7 +30,8 @@ class TimeseriesFieldTypeHelper {
     }
 
     public boolean isTimeSeriesLabel(final String field, final Map<String, ?> unused) {
-        final MappedFieldType fieldType = mapperService.mappingLookup().getFieldType(field);
+        final MappingLookup lookup = mapperService.mappingLookup();
+        final MappedFieldType fieldType = lookup.getFieldType(field);
         return fieldType != null
             && (timestampField.equals(field) == false)
             && (fieldType.isAggregatable())

View file

@@ -60,6 +60,8 @@ import org.elasticsearch.index.mapper.LuceneDocument;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperBuilderContext;
 import org.elasticsearch.index.mapper.MapperTestCase;
+import org.elasticsearch.index.mapper.Mapping;
+import org.elasticsearch.index.mapper.MappingLookup;
 import org.elasticsearch.index.mapper.NestedLookup;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.query.SearchExecutionContext;
@@ -1086,6 +1088,7 @@ public class WildcardFieldMapperTests extends MapperTestCase {
             IndexFieldData.Builder builder = fieldType.fielddataBuilder(fdc);
             return builder.build(new IndexFieldDataCache.None(), null);
         };
+        MappingLookup lookup = MappingLookup.fromMapping(Mapping.EMPTY);
         return new SearchExecutionContext(
             0,
             0,
@@ -1093,7 +1096,7 @@ public class WildcardFieldMapperTests extends MapperTestCase {
             bitsetFilterCache,
             indexFieldDataLookup,
             null,
-            null,
+            lookup,
             null,
             null,
             parserConfig(),

View file

@@ -5,6 +5,10 @@ import static org.elasticsearch.gradle.internal.distribution.InternalElasticsear
 apply plugin: 'elasticsearch.test.fixtures'
 apply plugin: 'elasticsearch.internal-distribution-download'
 
+dockerCompose {
+  composeAdditionalArgs = ['--compatibility']
+}
+
 tasks.register("copyKeystore", Sync) {
   from project(':x-pack:plugin:core')
     .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
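
For context: docker-compose's `--compatibility` flag converts v3 `deploy` keys (such as CPU and memory limits) into their non-Swarm equivalents, presumably so the fixture's resource constraints still take effect when run outside Swarm mode.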