Rename Mapper#name to Mapper#fullPath (#110040)

This addresses a long-standing TODO that has caused quite a few bugs over time: the Mapper name does not include its full path, while the MappedFieldType name does.

We have already renamed Mapper.Builder#name to leafName (#109971) and Mapper#simpleName to leafName (#110030). This commit renames Mapper#name to fullPath for clarity.
This required some adjustments in FieldAliasMapper to avoid confusion between the existing path method and fullPath: I renamed path to targetPath.
ObjectMapper already had a fullPath method that simply returned name, making it an effective duplicate, so it could be removed.
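
To make the new naming concrete, here is a minimal sketch against a hypothetical mapping (the book/title field names and the mapperService lookup are illustrative only, not code from this commit):

    // Hypothetical mapping: { "book": { "properties": { "title": { "type": "text" } } } }
    Mapper titleMapper = mapperService.mappingLookup().getMapper("book.title");
    titleMapper.leafName(); // "title"      -- the name relative to the enclosing object (#109971, #110030)
    titleMapper.fullPath(); // "book.title" -- the full dotted path, now consistent with MappedFieldType#name

    // Hypothetical alias: { "book_title_alias": { "type": "alias", "path": "book.title" } }
    FieldAliasMapper alias = (FieldAliasMapper) mapperService.mappingLookup().getMapper("book_title_alias");
    alias.fullPath();   // "book_title_alias"
    alias.targetPath(); // "book.title" -- formerly path(), renamed to avoid confusion with fullPath()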
Luca Cavanna 2024-06-21 22:47:27 +02:00 committed by GitHub
parent b0d9f95e53
commit 915e4a50c5
71 changed files with 354 additions and 312 deletions

@@ -217,7 +217,7 @@ public class DataStreamIndexSettingsProvider implements IndexSettingProvider {
     private static void extractPath(List<String> routingPaths, Mapper mapper) {
         if (mapper instanceof KeywordFieldMapper keywordFieldMapper) {
             if (keywordFieldMapper.fieldType().isDimension()) {
-                routingPaths.add(mapper.name());
+                routingPaths.add(mapper.fullPath());
             }
         }
     }

@@ -621,7 +621,7 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
     protected void checkIncomingMergeType(FieldMapper mergeWith) {
         if (mergeWith instanceof LegacyGeoShapeFieldMapper == false && CONTENT_TYPE.equals(mergeWith.typeName())) {
             throw new IllegalArgumentException(
-                "mapper [" + name() + "] of type [geo_shape] cannot change strategy from [recursive] to [BKD]"
+                "mapper [" + fullPath() + "] of type [geo_shape] cannot change strategy from [recursive] to [BKD]"
             );
         }
         super.checkIncomingMergeType(mergeWith);

@@ -444,7 +444,7 @@ public class MatchOnlyTextFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
         return new StringStoredFieldFieldLoader(fieldType().storedFieldNameForSyntheticSource(), leafName(), null) {

@@ -205,9 +205,9 @@ public class RankFeatureFieldMapper extends FieldMapper {
             value = context.parser().floatValue();
         }
-        if (context.doc().getByKey(name()) != null) {
+        if (context.doc().getByKey(fullPath()) != null) {
             throw new IllegalArgumentException(
-                "[rank_feature] fields do not support indexing multiple values for the same field [" + name() + "] in the same document"
+                "[rank_feature] fields do not support indexing multiple values for the same field [" + fullPath() + "] in the same document"
             );
         }
@@ -215,7 +215,7 @@ public class RankFeatureFieldMapper extends FieldMapper {
             value = 1 / value;
         }
-        context.doc().addWithKey(name(), new FeatureField(NAME, name(), value));
+        context.doc().addWithKey(fullPath(), new FeatureField(NAME, fullPath(), value));
     }

     private static Float objectToFloat(Object value) {

@@ -174,7 +174,7 @@ public class RankFeaturesFieldMapper extends FieldMapper {
             } else if (token == Token.VALUE_NULL) {
                 // ignore feature, this is consistent with numeric fields
             } else if (token == Token.VALUE_NUMBER || token == Token.VALUE_STRING) {
-                final String key = name() + "." + feature;
+                final String key = fullPath() + "." + feature;
                 float value = context.parser().floatValue(true);
                 if (context.doc().getByKey(key) != null) {
                     throw new IllegalArgumentException(
@@ -187,7 +187,7 @@ public class RankFeaturesFieldMapper extends FieldMapper {
                 if (positiveScoreImpact == false) {
                     value = 1 / value;
                 }
-                context.doc().addWithKey(key, new FeatureField(name(), feature, value));
+                context.doc().addWithKey(key, new FeatureField(fullPath(), feature, value));
             } else {
                 throw new IllegalArgumentException(
                     "[rank_features] fields take hashes that map a feature to a strictly positive "

@@ -531,7 +531,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
                 context.addIgnoredField(mappedFieldType.name());
                 if (isSourceSynthetic) {
                     // Save a copy of the field so synthetic source can load it
-                    context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+                    context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
                 }
                 return;
             } else {
@@ -559,7 +559,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
                 context.addIgnoredField(mappedFieldType.name());
                 if (isSourceSynthetic) {
                     // Save a copy of the field so synthetic source can load it
-                    context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+                    context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
                 }
                 return;
             } else {
@@ -721,15 +721,19 @@ public class ScaledFloatFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
-        return new SortedNumericDocValuesSyntheticFieldLoader(name(), leafName(), ignoreMalformed.value()) {
+        return new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value()) {
             @Override
             protected void writeValue(XContentBuilder b, long value) throws IOException {
                 b.value(decodeForSyntheticSource(value, scalingFactor));

@@ -163,7 +163,7 @@ public class TokenCountFieldMapper extends FieldMapper {
         if (value == null) {
             tokenCount = nullValue;
         } else {
-            tokenCount = countPositions(analyzer, name(), value, enablePositionIncrements);
+            tokenCount = countPositions(analyzer, fullPath(), value, enablePositionIncrements);
         }

         NumberFieldMapper.NumberType.INTEGER.addFields(context.doc(), fieldType().name(), tokenCount, index, hasDocValues, store);

@@ -690,7 +690,7 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase {
         assertSearchAsYouTypeFieldType(mapper, mapper.fieldType(), maxShingleSize, analyzerName, mapper.prefixField().fieldType());
         assertThat(mapper.prefixField(), notNullValue());
-        assertThat(mapper.prefixField().fieldType().parentField, equalTo(mapper.name()));
+        assertThat(mapper.prefixField().fieldType().parentField, equalTo(mapper.fullPath()));
         assertPrefixFieldType(mapper.prefixField(), mapper.indexAnalyzers(), maxShingleSize, analyzerName);

         for (int shingleSize = 2; shingleSize <= maxShingleSize; shingleSize++) {
@@ -709,7 +709,7 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase {
         assertThat(mapper.shingleFields().length, equalTo(numberOfShingleSubfields));

         final Set<String> fieldsUsingSourcePath = new HashSet<>();
-        mapper.sourcePathUsedBy().forEachRemaining(mapper1 -> fieldsUsingSourcePath.add(mapper1.name()));
+        mapper.sourcePathUsedBy().forEachRemaining(mapper1 -> fieldsUsingSourcePath.add(mapper1.fullPath()));
         int multiFields = 0;
         for (FieldMapper ignored : mapper.multiFields()) {
             multiFields++;
@@ -717,12 +717,12 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase {
         assertThat(fieldsUsingSourcePath.size(), equalTo(numberOfShingleSubfields + 1 + multiFields));

         final Set<String> expectedFieldsUsingSourcePath = new HashSet<>();
-        expectedFieldsUsingSourcePath.add(mapper.prefixField().name());
+        expectedFieldsUsingSourcePath.add(mapper.prefixField().fullPath());
         for (ShingleFieldMapper shingleFieldMapper : mapper.shingleFields()) {
-            expectedFieldsUsingSourcePath.add(shingleFieldMapper.name());
+            expectedFieldsUsingSourcePath.add(shingleFieldMapper.fullPath());
         }
         for (FieldMapper multiField : mapper.multiFields()) {
-            expectedFieldsUsingSourcePath.add(multiField.name());
+            expectedFieldsUsingSourcePath.add(multiField.fullPath());
         }
         assertThat(fieldsUsingSourcePath, equalTo(expectedFieldsUsingSourcePath));
     }

@@ -128,7 +128,7 @@ public final class ParentJoinFieldMapper extends FieldMapper {
             relations.get()
                 .stream()
                 .map(relation -> new ParentIdFieldMapper(leafName() + "#" + relation.parent(), eagerGlobalOrdinals.get()))
-                .forEach(mapper -> parentIdFields.put(mapper.name(), mapper));
+                .forEach(mapper -> parentIdFields.put(mapper.fullPath(), mapper));
             Joiner joiner = new Joiner(leafName(), relations.get());
             return new ParentJoinFieldMapper(
                 leafName(),
@@ -264,13 +264,17 @@ public final class ParentJoinFieldMapper extends FieldMapper {
                 } else if ("parent".equals(currentFieldName)) {
                     parent = context.parser().text();
                 } else {
-                    throw new IllegalArgumentException("unknown field name [" + currentFieldName + "] in join field [" + name() + "]");
+                    throw new IllegalArgumentException(
+                        "unknown field name [" + currentFieldName + "] in join field [" + fullPath() + "]"
+                    );
                 }
             } else if (token == XContentParser.Token.VALUE_NUMBER) {
                 if ("parent".equals(currentFieldName)) {
                     parent = context.parser().numberValue().toString();
                 } else {
-                    throw new IllegalArgumentException("unknown field name [" + currentFieldName + "] in join field [" + name() + "]");
+                    throw new IllegalArgumentException(
+                        "unknown field name [" + currentFieldName + "] in join field [" + fullPath() + "]"
+                    );
                 }
             }
         }
@@ -278,23 +282,23 @@ public final class ParentJoinFieldMapper extends FieldMapper {
             name = context.parser().text();
             parent = null;
         } else {
-            throw new IllegalStateException("[" + name() + "] expected START_OBJECT or VALUE_STRING but was: " + token);
+            throw new IllegalStateException("[" + fullPath() + "] expected START_OBJECT or VALUE_STRING but was: " + token);
         }
         if (name == null) {
-            throw new IllegalArgumentException("null join name in field [" + name() + "]");
+            throw new IllegalArgumentException("null join name in field [" + fullPath() + "]");
         }
         if (fieldType().joiner.knownRelation(name) == false) {
-            throw new IllegalArgumentException("unknown join name [" + name + "] for field [" + name() + "]");
+            throw new IllegalArgumentException("unknown join name [" + name + "] for field [" + fullPath() + "]");
         }
         if (fieldType().joiner.childTypeExists(name)) {
             // Index the document as a child
             if (parent == null) {
-                throw new IllegalArgumentException("[parent] is missing for join field [" + name() + "]");
+                throw new IllegalArgumentException("[parent] is missing for join field [" + fullPath() + "]");
             }
             if (context.routing() == null) {
-                throw new IllegalArgumentException("[routing] is missing for join field [" + name() + "]");
+                throw new IllegalArgumentException("[routing] is missing for join field [" + fullPath() + "]");
             }
             String fieldName = fieldType().joiner.parentJoinField(name);
             parentIdFields.get(fieldName).indexValue(context, parent);

@@ -430,13 +430,13 @@ public class ParentJoinFieldMapperTests extends MapperServiceTestCase {
         Iterator<Mapper> it = mapper.iterator();
         FieldMapper next = (FieldMapper) it.next();
-        assertThat(next.name(), equalTo("join_field#parent"));
+        assertThat(next.fullPath(), equalTo("join_field#parent"));
         assertTrue(next.fieldType().isSearchable());
         assertTrue(next.fieldType().isAggregatable());
         assertTrue(it.hasNext());
         next = (FieldMapper) it.next();
-        assertThat(next.name(), equalTo("join_field#child"));
+        assertThat(next.fullPath(), equalTo("join_field#child"));
         assertTrue(next.fieldType().isSearchable());
         assertTrue(next.fieldType().isAggregatable());

@@ -407,7 +407,7 @@ public class PercolatorFieldMapper extends FieldMapper {
     @Override
     public void parse(DocumentParserContext context) throws IOException {
         SearchExecutionContext executionContext = this.searchExecutionContext.get();
-        if (context.doc().getField(queryBuilderField.name()) != null) {
+        if (context.doc().getField(queryBuilderField.fullPath()) != null) {
             // If a percolator query has been defined in an array object then multiple percolator queries
             // could be provided. In order to prevent this we fail if we try to parse more than one query
             // for the current document.
@@ -493,27 +493,27 @@ public class PercolatorFieldMapper extends FieldMapper {
                 builder.append(new BytesRef(extraction.field()));
                 builder.append(FIELD_VALUE_SEPARATOR);
                 builder.append(extraction.bytes());
-                doc.add(new StringField(queryTermsField.name(), builder.toBytesRef(), Field.Store.NO));
+                doc.add(new StringField(queryTermsField.fullPath(), builder.toBytesRef(), Field.Store.NO));
             } else if (extraction.range != null) {
                 byte[] min = extraction.range.lowerPoint;
                 byte[] max = extraction.range.upperPoint;
-                doc.add(new BinaryRange(rangeFieldMapper.name(), encodeRange(extraction.range.fieldName, min, max)));
+                doc.add(new BinaryRange(rangeFieldMapper.fullPath(), encodeRange(extraction.range.fieldName, min, max)));
             }
         }
         if (result.matchAllDocs) {
-            doc.add(new StringField(extractionResultField.name(), EXTRACTION_FAILED, Field.Store.NO));
+            doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_FAILED, Field.Store.NO));
             if (result.verified) {
-                doc.add(new StringField(extractionResultField.name(), EXTRACTION_COMPLETE, Field.Store.NO));
+                doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_COMPLETE, Field.Store.NO));
             }
         } else if (result.verified) {
-            doc.add(new StringField(extractionResultField.name(), EXTRACTION_COMPLETE, Field.Store.NO));
+            doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_COMPLETE, Field.Store.NO));
         } else {
-            doc.add(new StringField(extractionResultField.name(), EXTRACTION_PARTIAL, Field.Store.NO));
+            doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_PARTIAL, Field.Store.NO));
         }
         context.addToFieldNames(fieldType().name());
-        doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch));
+        doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.fullPath(), result.minimumShouldMatch));
     }

     static SearchExecutionContext configureContext(SearchExecutionContext context, boolean mapUnmappedFieldsAsString) {

@@ -87,7 +87,7 @@ public class QueryBuilderStoreTests extends ESTestCase {
         when(searchExecutionContext.getWriteableRegistry()).thenReturn(writableRegistry());
         when(searchExecutionContext.getParserConfig()).thenReturn(parserConfig());
         when(searchExecutionContext.getForField(fieldMapper.fieldType(), fielddataOperation)).thenReturn(
-            new BytesBinaryIndexFieldData(fieldMapper.name(), CoreValuesSourceType.KEYWORD)
+            new BytesBinaryIndexFieldData(fieldMapper.fullPath(), CoreValuesSourceType.KEYWORD)
         );
         when(searchExecutionContext.getFieldType(Mockito.anyString())).thenAnswer(invocation -> {
             final String fieldName = (String) invocation.getArguments()[0];

@@ -526,7 +526,7 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
         }

         if (value.length() > ignoreAbove) {
-            context.addIgnoredField(name());
+            context.addIgnoredField(fullPath());
             return;
         }

@@ -580,11 +580,11 @@ public class AnnotatedTextFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
         if (fieldType.stored()) {
-            return new StringStoredFieldFieldLoader(name(), leafName(), null) {
+            return new StringStoredFieldFieldLoader(fullPath(), leafName(), null) {
                 @Override
                 protected void write(XContentBuilder b, Object value) throws IOException {
                     b.value((String) value);
@@ -602,7 +602,7 @@ public class AnnotatedTextFieldMapper extends FieldMapper {
                 Locale.ROOT,
                 "field [%s] of type [%s] doesn't support synthetic source unless it is stored or has a sub-field of"
                     + " type [keyword] with doc values or stored and without a normalizer",
-                name(),
+                fullPath(),
                 typeName()
             )
         );

@@ -116,7 +116,7 @@ public class Murmur3FieldMapper extends FieldMapper {
             final long hash = MurmurHash3.hash128(bytes.bytes, bytes.offset, bytes.length, 0, new MurmurHash3.Hash128()).h1;
             context.doc().add(new SortedNumericDocValuesField(fieldType().name(), hash));
             if (fieldType().isStored()) {
-                context.doc().add(new StoredField(name(), hash));
+                context.doc().add(new StoredField(fullPath(), hash));
             }
         }
     }

@@ -90,7 +90,7 @@ public class SizeFieldMapper extends MetadataFieldMapper {
             return;
         }
         final int value = context.sourceToParse().source().length();
-        NumberType.INTEGER.addFields(context.doc(), name(), value, true, true, true);
+        NumberType.INTEGER.addFields(context.doc(), fullPath(), value, true, true, true);
     }

     @Override

@@ -859,7 +859,7 @@ public class FieldCapabilitiesIT extends ESIntegTestCase {
         @Override
         public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
-            return new StringStoredFieldFieldLoader(name(), leafName(), null) {
+            return new StringStoredFieldFieldLoader(fullPath(), leafName(), null) {
                 @Override
                 protected void write(XContentBuilder b, Object value) throws IOException {
                     BytesRef ref = (BytesRef) value;

@@ -124,7 +124,7 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte
     private static final ParseField MAPPING = new ParseField("mapping");

     /**
-     * Returns the mappings as a map. Note that the returned map has a single key which is always the field's {@link Mapper#name}.
+     * Returns the mappings as a map. Note that the returned map has a single key which is always the field's {@link Mapper#fullPath}.
     */
     public Map<String, Object> sourceAsMap() {
         return XContentHelper.convertToMap(source, true, XContentType.JSON).v2();

@@ -158,12 +158,12 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
         for (String field : request.fields()) {
             if (Regex.isMatchAllPattern(field)) {
                 for (Mapper fieldMapper : mappingLookup.fieldMappers()) {
-                    addFieldMapper(fieldPredicate, fieldMapper.name(), fieldMapper, fieldMappings, request.includeDefaults());
+                    addFieldMapper(fieldPredicate, fieldMapper.fullPath(), fieldMapper, fieldMappings, request.includeDefaults());
                 }
             } else if (Regex.isSimpleMatchPattern(field)) {
                 for (Mapper fieldMapper : mappingLookup.fieldMappers()) {
-                    if (Regex.simpleMatch(field, fieldMapper.name())) {
-                        addFieldMapper(fieldPredicate, fieldMapper.name(), fieldMapper, fieldMappings, request.includeDefaults());
+                    if (Regex.simpleMatch(field, fieldMapper.fullPath())) {
+                        addFieldMapper(fieldPredicate, fieldMapper.fullPath(), fieldMapper, fieldMappings, request.includeDefaults());
                     }
                 }
             } else {
@@ -195,7 +195,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
                 includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS,
                 false
             );
-            fieldMappings.put(field, new FieldMappingMetadata(fieldMapper.name(), bytes));
+            fieldMappings.put(field, new FieldMappingMetadata(fieldMapper.fullPath(), bytes));
         } catch (IOException e) {
             throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e);
         }

@@ -315,7 +315,7 @@ public abstract class Engine implements Closeable {
         for (Mapper mapper : mappingLookup.fieldMappers()) {
             if (mapper instanceof FieldMapper fieldMapper) {
                 if (fieldMapper.fieldType() instanceof SparseVectorFieldMapper.SparseVectorFieldType) {
-                    mappers.put(fieldMapper.name(), fieldMapper);
+                    mappers.put(fieldMapper.fullPath(), fieldMapper);
                 }
             }
         }

@@ -209,16 +209,20 @@ public class BinaryFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
-        return new BinaryDocValuesSyntheticFieldLoader(name()) {
+        return new BinaryDocValuesSyntheticFieldLoader(fullPath()) {
             @Override
             protected void writeValue(XContentBuilder b, BytesRef value) throws IOException {
                 var in = new ByteArrayStreamInput();

@@ -441,7 +441,7 @@ public class BooleanFieldMapper extends FieldMapper {
                 context.addIgnoredField(mappedFieldType.name());
                 if (storeMalformedFields) {
                     // Save a copy of the field so synthetic source can load it
-                    context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+                    context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
                 }
             } else {
                 throw e;
@@ -505,15 +505,19 @@ public class BooleanFieldMapper extends FieldMapper {
         }
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
-        return new SortedNumericDocValuesSyntheticFieldLoader(name(), leafName(), ignoreMalformed.value()) {
+        return new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value()) {
             @Override
             protected void writeValue(XContentBuilder b, long value) throws IOException {
                 b.value(value == 1);

@@ -929,7 +929,7 @@ public final class DateFieldMapper extends FieldMapper {
                 context.addIgnoredField(mappedFieldType.name());
                 if (isSourceSynthetic) {
                     // Save a copy of the field so synthetic source can load it
-                    context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+                    context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
                 }
                 return;
             } else {
@@ -989,15 +989,19 @@ public final class DateFieldMapper extends FieldMapper {
         }
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
            );
         }
-        return new SortedNumericDocValuesSyntheticFieldLoader(name(), leafName(), ignoreMalformed) {
+        return new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed) {
             @Override
             protected void writeValue(XContentBuilder b, long value) throws IOException {
                 b.value(fieldType().format(value, fieldType().dateTimeFormatter()));

@@ -50,7 +50,7 @@ public class DocumentMapper {
         MapperMetrics mapperMetrics
     ) {
         this.documentParser = documentParser;
-        this.type = mapping.getRoot().name();
+        this.type = mapping.getRoot().fullPath();
         this.mappingLookup = MappingLookup.fromMapping(mapping);
         this.mappingSource = source;
         this.mapperMetrics = mapperMetrics;

@@ -169,7 +169,7 @@ public final class DocumentParser {
         // the document reader, so to ensure that we don't run them multiple times we
         // guard them with an 'executed' boolean
         Map<String, Consumer<LeafReaderContext>> fieldScripts = new HashMap<>();
-        indexTimeScriptMappers.forEach(mapper -> fieldScripts.put(mapper.name(), new Consumer<>() {
+        indexTimeScriptMappers.forEach(mapper -> fieldScripts.put(mapper.fullPath(), new Consumer<>() {
             boolean executed = false;

             @Override
@@ -250,7 +250,7 @@ public final class DocumentParser {
             return null;
         }
         RootObjectMapper.Builder rootBuilder = context.updateRoot();
-        context.getDynamicMappers().forEach(mapper -> rootBuilder.addDynamic(mapper.name(), null, mapper, context));
+        context.getDynamicMappers().forEach(mapper -> rootBuilder.addDynamic(mapper.fullPath(), null, mapper, context));

         for (RuntimeField runtimeField : context.getDynamicRuntimeFields()) {
             rootBuilder.addRuntimeField(runtimeField);
@@ -294,7 +294,7 @@ public final class DocumentParser {
             Tuple<DocumentParserContext, XContentBuilder> tuple = XContentDataHelper.cloneSubContext(context);
             context.addIgnoredField(
                 new IgnoredSourceFieldMapper.NameValue(
-                    context.parent().name(),
+                    context.parent().fullPath(),
                     context.parent().fullPath().indexOf(context.parent().leafName()),
                     XContentDataHelper.encodeXContentBuilder(tuple.v2()),
                     context.doc()
@@ -327,7 +327,7 @@ public final class DocumentParser {
             throw new DocumentParsingException(
                 context.parser().getTokenLocation(),
                 "object mapping for ["
-                    + mapper.name()
+                    + mapper.fullPath()
                     + "] tried to parse field ["
                     + currentFieldName
                     + "] as object, but found a concrete value"
@@ -384,7 +384,7 @@ public final class DocumentParser {
             throw new DocumentParsingException(
                 context.parser().getTokenLocation(),
                 "object mapping for ["
-                    + mapper.name()
+                    + mapper.fullPath()
                     + "] tried to parse field ["
                     + context.parser().currentName()
                     + "] as object, but got EOF, has a concrete value been provided to it?"
@@ -440,7 +440,7 @@ public final class DocumentParser {
                 context.addIgnoredField(
                     IgnoredSourceFieldMapper.NameValue.fromContext(
                         context,
-                        fieldMapper.name(),
+                        fieldMapper.fullPath(),
                         XContentDataHelper.encodeXContentBuilder(contextWithSourceToStore.v2())
                     )
                 );
@@ -476,14 +476,14 @@ public final class DocumentParser {
     private static void throwOnUnrecognizedMapperType(Mapper mapper) {
         throw new IllegalStateException(
-            "The provided mapper [" + mapper.name() + "] has an unrecognized type [" + mapper.getClass().getSimpleName() + "]."
+            "The provided mapper [" + mapper.fullPath() + "] has an unrecognized type [" + mapper.getClass().getSimpleName() + "]."
         );
     }

     private static void throwOnCopyToOnFieldAlias(DocumentParserContext context, Mapper mapper) {
         throw new DocumentParsingException(
             context.parser().getTokenLocation(),
-            "Cannot " + (context.isWithinCopyTo() ? "copy" : "write") + " to a field alias [" + mapper.name() + "]."
+            "Cannot " + (context.isWithinCopyTo() ? "copy" : "write") + " to a field alias [" + mapper.fullPath() + "]."
         );
     }
@@ -491,7 +491,7 @@ public final class DocumentParser {
         throw new DocumentParsingException(
             context.parser().getTokenLocation(),
             "Cannot copy field ["
-                + mapper.name()
+                + mapper.fullPath()
                 + "] to fields "
                 + copyToFields
                 + ". Copy-to currently only works for value-type fields, not objects."
@@ -564,7 +564,7 @@ public final class DocumentParser {
                 "Tried to add nested object ["
                     + dynamicObjectMapper.leafName()
                     + "] to object ["
-                    + context.parent().name()
+                    + context.parent().fullPath()
                     + "] which does not support subobjects"
             );
         }
@@ -594,7 +594,7 @@ public final class DocumentParser {
     private static void throwOnCreateDynamicNestedViaCopyTo(Mapper dynamicObjectMapper, DocumentParserContext context) {
         throw new DocumentParsingException(
             context.parser().getTokenLocation(),
-            "It is forbidden to create dynamic nested objects ([" + dynamicObjectMapper.name() + "]) through `copy_to`"
+            "It is forbidden to create dynamic nested objects ([" + dynamicObjectMapper.fullPath() + "]) through `copy_to`"
         );
     }
@@ -759,7 +759,7 @@ public final class DocumentParser {
             throw new DocumentParsingException(
                 context.parser().getTokenLocation(),
                 "object mapping for ["
-                    + context.parent().name()
+                    + context.parent().fullPath()
                     + "] with array for ["
                     + arrayFieldName
                     + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?"
@@ -782,7 +782,7 @@ public final class DocumentParser {
             throw new DocumentParsingException(
                 context.parser().getTokenLocation(),
                 "object mapping ["
-                    + context.parent().name()
+                    + context.parent().fullPath()
                     + "] trying to serialize a value with"
                     + " no field associated with it, current value ["
                     + context.parser().textOrNull()
@@ -937,7 +937,7 @@ public final class DocumentParser {
         }

         @Override
-        public String name() {
+        public String fullPath() {
             throw new UnsupportedOperationException();
         }

@@ -373,15 +373,15 @@ public abstract class DocumentParserContext {
     public final boolean addDynamicMapper(Mapper mapper) {
         // eagerly check object depth limit here to avoid stack overflow errors
         if (mapper instanceof ObjectMapper) {
-            MappingLookup.checkObjectDepthLimit(indexSettings().getMappingDepthLimit(), mapper.name());
+            MappingLookup.checkObjectDepthLimit(indexSettings().getMappingDepthLimit(), mapper.fullPath());
         }
         // eagerly check field name limit here to avoid OOM errors
         // only check fields that are not already mapped or tracked in order to avoid hitting field limit too early via double-counting
         // note that existing fields can also receive dynamic mapping updates (e.g. constant_keyword to fix the value)
-        if (mappingLookup.getMapper(mapper.name()) == null
-            && mappingLookup.objectMappers().containsKey(mapper.name()) == false
-            && dynamicMappers.containsKey(mapper.name()) == false) {
+        if (mappingLookup.getMapper(mapper.fullPath()) == null
+            && mappingLookup.objectMappers().containsKey(mapper.fullPath()) == false
+            && dynamicMappers.containsKey(mapper.fullPath()) == false) {
             int mapperSize = mapper.getTotalFieldsCount();
             int additionalFieldsToAdd = getNewFieldsSize() + mapperSize;
             if (indexSettings().isIgnoreDynamicFieldsBeyondLimit()) {
@@ -391,15 +391,15 @@ public abstract class DocumentParserContext {
                         addIgnoredField(
                             IgnoredSourceFieldMapper.NameValue.fromContext(
                                 this,
-                                mapper.name(),
+                                mapper.fullPath(),
                                 XContentDataHelper.encodeToken(parser())
                             )
                         );
                     } catch (IOException e) {
-                        throw new IllegalArgumentException("failed to parse field [" + mapper.name() + " ]", e);
+                        throw new IllegalArgumentException("failed to parse field [" + mapper.fullPath() + " ]", e);
                     }
                 }
-                addIgnoredField(mapper.name());
+                addIgnoredField(mapper.fullPath());
                 return false;
             }
         } else {
@@ -408,7 +408,7 @@ public abstract class DocumentParserContext {
             dynamicMappersSize.add(mapperSize);
         }
         if (mapper instanceof ObjectMapper objectMapper) {
-            dynamicObjectMappers.put(objectMapper.name(), objectMapper);
+            dynamicObjectMappers.put(objectMapper.fullPath(), objectMapper);
             // dynamic object mappers may have been obtained from applying a dynamic template, in which case their definition may contain
             // sub-fields as well as sub-objects that need to be added to the mappings
             for (Mapper submapper : objectMapper.mappers.values()) {
@@ -425,7 +425,7 @@ public abstract class DocumentParserContext {
         // dynamically mapped objects when the incoming document defines no sub-fields in them:
         // 1) by default, they would be empty containers in the mappings, is it then important to map them?
        // 2) they can be the result of applying a dynamic template which may define sub-fields or set dynamic, enabled or subobjects.
-        dynamicMappers.computeIfAbsent(mapper.name(), k -> new ArrayList<>()).add(mapper);
+        dynamicMappers.computeIfAbsent(mapper.fullPath(), k -> new ArrayList<>()).add(mapper);
         return true;
     }

@@ -32,16 +32,16 @@ public final class FieldAliasMapper extends Mapper {
     }

     private final String name;
-    private final String path;
+    private final String targetPath;

-    public FieldAliasMapper(String simpleName, String name, String path) {
+    public FieldAliasMapper(String simpleName, String name, String targetPath) {
         super(simpleName);
         this.name = Mapper.internFieldName(name);
-        this.path = path;
+        this.targetPath = targetPath;
     }

     @Override
-    public String name() {
+    public String fullPath() {
         return name;
     }
@@ -50,15 +50,15 @@ public final class FieldAliasMapper extends Mapper {
         return CONTENT_TYPE;
     }

-    public String path() {
-        return path;
+    public String targetPath() {
+        return targetPath;
     }

     @Override
     public Mapper merge(Mapper mergeWith, MapperMergeContext mapperMergeContext) {
         if ((mergeWith instanceof FieldAliasMapper) == false) {
             throw new IllegalArgumentException(
-                "Cannot merge a field alias mapping [" + name() + "] with a mapping that is not for a field alias."
+                "Cannot merge a field alias mapping [" + fullPath() + "] with a mapping that is not for a field alias."
             );
         }
         return mergeWith;
@@ -71,37 +71,37 @@ public final class FieldAliasMapper extends Mapper {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        return builder.startObject(leafName()).field("type", CONTENT_TYPE).field(Names.PATH, path).endObject();
+        return builder.startObject(leafName()).field("type", CONTENT_TYPE).field(Names.PATH, targetPath).endObject();
     }

     @Override
     public void validate(MappingLookup mappers) {
-        if (Objects.equals(this.path(), this.name())) {
+        if (Objects.equals(this.targetPath(), this.fullPath())) {
             throw new MapperParsingException(
-                "Invalid [path] value [" + path + "] for field alias [" + name() + "]: an alias cannot refer to itself."
+                "Invalid [path] value [" + targetPath + "] for field alias [" + fullPath() + "]: an alias cannot refer to itself."
             );
         }
-        if (mappers.fieldTypesLookup().get(path) == null) {
+        if (mappers.fieldTypesLookup().get(targetPath) == null) {
             throw new MapperParsingException(
                 "Invalid [path] value ["
-                    + path
+                    + targetPath
                     + "] for field alias ["
-                    + name()
+                    + fullPath()
                     + "]: an alias must refer to an existing field in the mappings."
             );
         }
-        if (mappers.getMapper(path) instanceof FieldAliasMapper) {
+        if (mappers.getMapper(targetPath) instanceof FieldAliasMapper) {
             throw new MapperParsingException(
-                "Invalid [path] value [" + path + "] for field alias [" + name() + "]: an alias cannot refer to another alias."
+                "Invalid [path] value [" + targetPath + "] for field alias [" + fullPath() + "]: an alias cannot refer to another alias."
             );
         }
         String aliasScope = mappers.nestedLookup().getNestedParent(name);
-        String pathScope = mappers.nestedLookup().getNestedParent(path);
+        String pathScope = mappers.nestedLookup().getNestedParent(targetPath);
         if (Objects.equals(aliasScope, pathScope) == false) {
             StringBuilder message = new StringBuilder(
                 "Invalid [path] value ["
-                    + path
+                    + targetPath
                     + "] for field alias ["
                     + name
                     + "]: an alias must have the same nested scope as its target. "

@@ -118,7 +118,7 @@ public abstract class FieldMapper extends Mapper {
     }

     @Override
-    public String name() {
+    public String fullPath() {
         return fieldType().name();
     }
@@ -277,9 +277,9 @@ public abstract class FieldMapper extends Mapper {
                 indexScriptValues(searchLookup, readerContext, doc, documentParserContext);
             } catch (Exception e) {
                 if (onScriptError == OnScriptError.CONTINUE) {
-                    documentParserContext.addIgnoredField(name());
+                    documentParserContext.addIgnoredField(fullPath());
                 } else {
-                    throw new DocumentParsingException(XContentLocation.UNKNOWN, "Error executing script on field [" + name() + "]", e);
+                    throw new DocumentParsingException(XContentLocation.UNKNOWN, "Error executing script on field [" + fullPath() + "]", e);
                 }
             }
         }
@@ -299,7 +299,7 @@ public abstract class FieldMapper extends Mapper {
         int doc,
         DocumentParserContext documentParserContext
     ) {
-        throw new UnsupportedOperationException("FieldMapper " + name() + " does not support [script]");
+        throw new UnsupportedOperationException("FieldMapper " + fullPath() + " does not support [script]");
     }

     @Override
@@ -321,11 +321,11 @@ public abstract class FieldMapper extends Mapper {
     @Override
     public final void validate(MappingLookup mappers) {
         if (this.copyTo() != null && this.copyTo().copyToFields().isEmpty() == false) {
-            if (mappers.isMultiField(this.name())) {
-                throw new IllegalArgumentException("[copy_to] may not be used to copy from a multi-field: [" + this.name() + "]");
+            if (mappers.isMultiField(this.fullPath())) {
+                throw new IllegalArgumentException("[copy_to] may not be used to copy from a multi-field: [" + this.fullPath() + "]");
             }
-            final String sourceScope = mappers.nestedLookup().getNestedParent(this.name());
+            final String sourceScope = mappers.nestedLookup().getNestedParent(this.fullPath());
             for (String copyTo : this.copyTo().copyToFields()) {
                 if (mappers.isMultiField(copyTo)) {
                     throw new IllegalArgumentException("[copy_to] may not be used to copy to a multi-field: [" + copyTo + "]");
@@ -381,7 +381,7 @@ public abstract class FieldMapper extends Mapper {
         if (mergeWith instanceof FieldMapper == false) {
             throw new IllegalArgumentException(
                 "mapper ["
-                    + name()
+                    + fullPath()
                     + "] cannot be changed from type ["
                     + contentType()
                     + "] to ["
@@ -395,7 +395,7 @@ public abstract class FieldMapper extends Mapper {
         if (builder == null) {
             return (FieldMapper) mergeWith;
         }
-        Conflicts conflicts = new Conflicts(name());
+        Conflicts conflicts = new Conflicts(fullPath());
         builder.merge((FieldMapper) mergeWith, conflicts, mapperMergeContext);
         conflicts.check();
         return builder.build(mapperMergeContext.getMapperBuilderContext());
@@ -404,12 +404,12 @@ public abstract class FieldMapper extends Mapper {
     protected void checkIncomingMergeType(FieldMapper mergeWith) {
         if (Objects.equals(this.getClass(), mergeWith.getClass()) == false) {
             throw new IllegalArgumentException(
-                "mapper [" + name() + "] cannot be changed from type [" + contentType() + "] to [" + mergeWith.contentType() + "]"
+                "mapper [" + fullPath() + "] cannot be changed from type [" + contentType() + "] to [" + mergeWith.contentType() + "]"
             );
         }
         if (Objects.equals(contentType(), mergeWith.contentType()) == false) {
             throw new IllegalArgumentException(
-                "mapper [" + name() + "] cannot be changed from type [" + contentType() + "] to [" + mergeWith.contentType() + "]"
+                "mapper [" + fullPath() + "] cannot be changed from type [" + contentType() + "] to [" + mergeWith.contentType() + "]"
             );
         }
     }
@@ -489,7 +489,7 @@ public abstract class FieldMapper extends Mapper {
         if (syntheticSourceMode() == SyntheticSourceMode.FALLBACK) {
             if (copyTo.copyToFields().isEmpty() != true) {
                 throw new IllegalArgumentException(
-                    "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                    "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
                 );
             }
             // Nothing because it is handled at `ObjectMapper` level.
@@ -574,7 +574,7 @@ public abstract class FieldMapper extends Mapper {
         private MultiFields(FieldMapper[] mappers) {
             this.mappers = mappers;
             // sort for consistent iteration order + serialization
-            Arrays.sort(this.mappers, Comparator.comparing(FieldMapper::name));
+            Arrays.sort(this.mappers, Comparator.comparing(FieldMapper::fullPath));
         }

         public void parse(FieldMapper mainField, DocumentParserContext context, Supplier<DocumentParserContext> multiFieldContextSupplier)

@@ -55,10 +55,10 @@ final class FieldTypeLookup {
         final Map<String, DynamicFieldType> dynamicFieldTypes = new HashMap<>();
         final Map<String, Set<String>> fieldToCopiedFields = new HashMap<>();
         for (FieldMapper fieldMapper : fieldMappers) {
-            String fieldName = fieldMapper.name();
+            String fieldName = fieldMapper.fullPath();
             MappedFieldType fieldType = fieldMapper.fieldType();
             fullNameToFieldType.put(fieldType.name(), fieldType);
-            fieldMapper.sourcePathUsedBy().forEachRemaining(mapper -> fullSubfieldNameToParentPath.put(mapper.name(), fieldName));
+            fieldMapper.sourcePathUsedBy().forEachRemaining(mapper -> fullSubfieldNameToParentPath.put(mapper.fullPath(), fieldName));
             if (fieldType instanceof DynamicFieldType) {
                 dynamicFieldTypes.put(fieldType.name(), (DynamicFieldType) fieldType);
             }
@@ -80,8 +80,8 @@ final class FieldTypeLookup {
         this.maxParentPathDots = maxParentPathDots;
         for (FieldAliasMapper fieldAliasMapper : fieldAliasMappers) {
-            String aliasName = fieldAliasMapper.name();
-            String path = fieldAliasMapper.path();
+            String aliasName = fieldAliasMapper.fullPath();
+            String path = fieldAliasMapper.targetPath();
             MappedFieldType fieldType = fullNameToFieldType.get(path);
             if (fieldType == null) {
                 continue;
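With the rename, an alias contributes its own fullPath() plus the targetPath() it points at, and resolution is a plain map lookup. A toy sketch of that flow (the map contents and names are illustrative, not the real FieldTypeLookup API):

import java.util.HashMap;
import java.util.Map;

class AliasResolutionSketch {
    public static void main(String[] args) {
        // Stand-in for fullNameToFieldType: concrete field path -> type name.
        Map<String, String> fullNameToFieldType = new HashMap<>();
        fullNameToFieldType.put("user.id", "keyword");

        String aliasName = "uid";      // what fieldAliasMapper.fullPath() would report
        String targetPath = "user.id"; // what fieldAliasMapper.targetPath() would report

        String fieldType = fullNameToFieldType.get(targetPath);
        if (fieldType != null) {
            // The alias is registered under its own name but answers with the target's type.
            fullNameToFieldType.put(aliasName, fieldType);
        }
        System.out.println(fullNameToFieldType.get("uid")); // keyword
    }
}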


@@ -612,7 +612,7 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<GeoPoi
         throws IOException {
         super.onMalformedValue(context, malformedDataForSyntheticSource, cause);
         if (malformedDataForSyntheticSource != null) {
-            context.doc().add(IgnoreMalformedStoredValues.storedField(name(), malformedDataForSyntheticSource));
+            context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), malformedDataForSyntheticSource));
         }
     }
@@ -628,15 +628,19 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<GeoPoi
         }
         if (fieldType().hasDocValues() == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
-        return new SortedNumericDocValuesSyntheticFieldLoader(name(), leafName(), ignoreMalformed()) {
+        return new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed()) {
             final GeoPoint point = new GeoPoint();

             @Override


@@ -539,7 +539,7 @@ public class IpFieldMapper extends FieldMapper {
             context.addIgnoredField(fieldType().name());
             if (storeIgnored) {
                 // Save a copy of the field so synthetic source can load it
-                context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+                context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
             }
             return;
         } else {
@@ -594,9 +594,9 @@ public class IpFieldMapper extends FieldMapper {
     @Override
     public void doValidate(MappingLookup lookup) {
-        if (dimension && null != lookup.nestedLookup().getNestedParent(name())) {
+        if (dimension && null != lookup.nestedLookup().getNestedParent(fullPath())) {
             throw new IllegalArgumentException(
-                TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]"
+                TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + fullPath() + "]"
             );
         }
     }
@@ -613,15 +613,19 @@ public class IpFieldMapper extends FieldMapper {
         }
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
-        return new SortedSetDocValuesSyntheticFieldLoader(name(), leafName(), null, ignoreMalformed) {
+        return new SortedSetDocValuesSyntheticFieldLoader(fullPath(), leafName(), null, ignoreMalformed) {
             @Override
             protected BytesRef convert(BytesRef value) {
                 byte[] bytes = Arrays.copyOfRange(value.bytes, value.offset, value.offset + value.length);


@@ -911,7 +911,7 @@ public final class KeywordFieldMapper extends FieldMapper {
         }
         if (value.length() > fieldType().ignoreAbove()) {
-            context.addIgnoredField(name());
+            context.addIgnoredField(fullPath());
             if (storeIgnored) {
                 // Save a copy of the field so synthetic source can load it
                 context.doc().add(new StoredField(originalName(), new BytesRef(value)));
@@ -919,7 +919,7 @@ public final class KeywordFieldMapper extends FieldMapper {
             return;
         }
-        value = normalizeValue(fieldType().normalizer(), name(), value);
+        value = normalizeValue(fieldType().normalizer(), fullPath(), value);

         // convert to utf8 only once before feeding postings/dv/stored fields
         final BytesRef binaryValue = new BytesRef(value);
@@ -1000,9 +1000,9 @@ public final class KeywordFieldMapper extends FieldMapper {
     @Override
     public void doValidate(MappingLookup lookup) {
-        if (fieldType().isDimension() && null != lookup.nestedLookup().getNestedParent(name())) {
+        if (fieldType().isDimension() && null != lookup.nestedLookup().getNestedParent(fullPath())) {
             throw new IllegalArgumentException(
-                TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]"
+                TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + fullPath() + "]"
             );
         }
     }
@@ -1017,7 +1017,7 @@ public final class KeywordFieldMapper extends FieldMapper {
      * for synthetic source.
      */
     private String originalName() {
-        return name() + "._original";
+        return fullPath() + "._original";
     }

     @Override
@@ -1036,17 +1036,17 @@ public final class KeywordFieldMapper extends FieldMapper {
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
         if (hasNormalizer()) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares a normalizer"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares a normalizer"
             );
         }
         if (fieldType.stored()) {
             return new StringStoredFieldFieldLoader(
-                name(),
+                fullPath(),
                 simpleName,
                 fieldType().ignoreAbove == Defaults.IGNORE_ABOVE ? null : originalName()
             ) {
@@ -1060,14 +1060,14 @@ public final class KeywordFieldMapper extends FieldMapper {
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
                 "field ["
-                    + name()
+                    + fullPath()
                     + "] of type ["
                     + typeName()
                     + "] doesn't support synthetic source because it doesn't have doc values and isn't stored"
             );
         }
         return new SortedSetDocValuesSyntheticFieldLoader(
-            name(),
+            fullPath(),
             simpleName,
             fieldType().ignoreAbove == Defaults.IGNORE_ABOVE ? null : originalName(),
             false


@@ -60,13 +60,17 @@ public abstract class Mapper implements ToXContentFragment, Iterable<Mapper> {
         this.leafName = internFieldName(leafName);
     }

+    /**
+     * Returns the name of the field.
+     * When the field has a parent object, its leaf name won't include the entire path.
+     * When subobjects are disabled, its leaf name will be the same as {@link #fullPath()} in practice, because its parent is the root.
+     */
     public final String leafName() {
         return leafName;
     }

     /** Returns the canonical name which uniquely identifies the mapper against other mappers in a type. */
-    // TODO rename this to fullPath???
-    public abstract String name();
+    public abstract String fullPath();

     /**
      * Returns a name representing the type of this mapper.
@@ -94,7 +98,7 @@ public abstract class Mapper implements ToXContentFragment, Iterable<Mapper> {
      * fields properly.
      */
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
-        throw new IllegalArgumentException("field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source");
+        throw new IllegalArgumentException("field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source");
     }

     @Override
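To make the new naming contract concrete: for a mapper mounted at "obj.source.geo", leafName() is "geo" while fullPath() is the whole dotted path. A toy model of the relationship (this PathedMapper class is illustrative only, not the Elasticsearch hierarchy):

class PathedMapper {
    private final String parentPath; // "" for mappers attached to the root
    private final String leafName;

    PathedMapper(String parentPath, String leafName) {
        this.parentPath = parentPath;
        this.leafName = leafName;
    }

    // Name relative to the immediate parent object.
    String leafName() {
        return leafName;
    }

    // Dotted path from the mapping root; equals leafName() when the parent is
    // the root, which is also the situation when subobjects are disabled.
    String fullPath() {
        return parentPath.isEmpty() ? leafName : parentPath + "." + leafName;
    }

    public static void main(String[] args) {
        PathedMapper geo = new PathedMapper("obj.source", "geo");
        System.out.println(geo.leafName()); // geo
        System.out.println(geo.fullPath()); // obj.source.geo
    }
}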


@@ -52,11 +52,11 @@ public final class Mapping implements ToXContentFragment {
         for (int i = 0; i < metadataMappers.length; i++) {
             MetadataFieldMapper metadataMapper = metadataMappers[i];
             metadataMappersMap[i] = Map.entry(metadataMapper.getClass(), metadataMapper);
-            metadataMappersByName[i] = Map.entry(metadataMapper.name(), metadataMapper);
+            metadataMappersByName[i] = Map.entry(metadataMapper.fullPath(), metadataMapper);
         }
         this.root = rootObjectMapper;
         // keep root mappers sorted for consistent serialization
-        Arrays.sort(metadataMappers, Comparator.comparing(Mapper::name));
+        Arrays.sort(metadataMappers, Comparator.comparing(Mapper::fullPath));
         this.metadataMappersMap = Map.ofEntries(metadataMappersMap);
         this.metadataMappersByName = Map.ofEntries(metadataMappersByName);
         this.meta = meta;
@@ -70,7 +70,7 @@ public final class Mapping implements ToXContentFragment {
         try {
             return new CompressedXContent(this);
         } catch (Exception e) {
-            throw new ElasticsearchGenerationException("failed to serialize source for type [" + root.name() + "]", e);
+            throw new ElasticsearchGenerationException("failed to serialize source for type [" + root.fullPath() + "]", e);
         }
     }


@@ -163,27 +163,27 @@ public final class MappingLookup {
         final Set<String> completionFields = new HashSet<>();
         final List<FieldMapper> indexTimeScriptMappers = new ArrayList<>();
         for (FieldMapper mapper : mappers) {
-            if (objects.containsKey(mapper.name())) {
-                throw new MapperParsingException("Field [" + mapper.name() + "] is defined both as an object and a field");
+            if (objects.containsKey(mapper.fullPath())) {
+                throw new MapperParsingException("Field [" + mapper.fullPath() + "] is defined both as an object and a field");
             }
-            if (fieldMappers.put(mapper.name(), mapper) != null) {
-                throw new MapperParsingException("Field [" + mapper.name() + "] is defined more than once");
+            if (fieldMappers.put(mapper.fullPath(), mapper) != null) {
+                throw new MapperParsingException("Field [" + mapper.fullPath() + "] is defined more than once");
             }
             indexAnalyzersMap.putAll(mapper.indexAnalyzers());
             if (mapper.hasScript()) {
                 indexTimeScriptMappers.add(mapper);
             }
             if (mapper instanceof CompletionFieldMapper) {
-                completionFields.add(mapper.name());
+                completionFields.add(mapper.fullPath());
             }
         }
         for (FieldAliasMapper aliasMapper : aliasMappers) {
-            if (objects.containsKey(aliasMapper.name())) {
-                throw new MapperParsingException("Alias [" + aliasMapper.name() + "] is defined both as an object and an alias");
+            if (objects.containsKey(aliasMapper.fullPath())) {
+                throw new MapperParsingException("Alias [" + aliasMapper.fullPath() + "] is defined both as an object and an alias");
             }
-            if (fieldMappers.put(aliasMapper.name(), aliasMapper) != null) {
-                throw new MapperParsingException("Alias [" + aliasMapper.name() + "] is defined both as an alias and a concrete field");
+            if (fieldMappers.put(aliasMapper.fullPath(), aliasMapper) != null) {
+                throw new MapperParsingException("Alias [" + aliasMapper.fullPath() + "] is defined both as an alias and a concrete field");
             }
         }
@@ -194,7 +194,7 @@ public final class MappingLookup {
         Map<String, InferenceFieldMetadata> inferenceFields = new HashMap<>();
         for (FieldMapper mapper : mappers) {
             if (mapper instanceof InferenceFieldMapper inferenceFieldMapper) {
-                inferenceFields.put(mapper.name(), inferenceFieldMapper.getMetadata(fieldTypeLookup.sourcePaths(mapper.name())));
+                inferenceFields.put(mapper.fullPath(), inferenceFieldMapper.getMetadata(fieldTypeLookup.sourcePaths(mapper.fullPath())));
             }
         }
         this.inferenceFields = Map.copyOf(inferenceFields);
@@ -225,7 +225,7 @@ public final class MappingLookup {
     private static void assertNamesInterned(String name, Mapper mapper) {
         assert name == name.intern();
-        assert mapper.name() == mapper.name().intern();
+        assert mapper.fullPath() == mapper.fullPath().intern();
         assert mapper.leafName() == mapper.leafName().intern();
         if (mapper instanceof ObjectMapper) {
             ((ObjectMapper) mapper).mappers.forEach(MappingLookup::assertNamesInterned);


@@ -197,7 +197,7 @@ public abstract class MetadataFieldMapper extends FieldMapper {
     protected void parseCreateField(DocumentParserContext context) throws IOException {
         throw new DocumentParsingException(
             context.parser().getTokenLocation(),
-            "Field [" + name() + "] is a metadata field and cannot be added inside a document. Use the index API request parameters."
+            "Field [" + fullPath() + "] is a metadata field and cannot be added inside a document. Use the index API request parameters."
         );
     }


@@ -82,20 +82,20 @@ public interface NestedLookup {
         if (mappers == null || mappers.isEmpty()) {
             return NestedLookup.EMPTY;
         }
-        mappers = mappers.stream().sorted(Comparator.comparing(ObjectMapper::name)).toList();
+        mappers = mappers.stream().sorted(Comparator.comparing(ObjectMapper::fullPath)).toList();
         Map<String, Query> parentFilters = new HashMap<>();
         Map<String, NestedObjectMapper> mappersByName = new HashMap<>();
         NestedObjectMapper previous = null;
         for (NestedObjectMapper mapper : mappers) {
-            mappersByName.put(mapper.name(), mapper);
+            mappersByName.put(mapper.fullPath(), mapper);
             if (previous != null) {
-                if (mapper.name().startsWith(previous.name() + ".")) {
-                    parentFilters.put(previous.name(), previous.nestedTypeFilter());
+                if (mapper.fullPath().startsWith(previous.fullPath() + ".")) {
+                    parentFilters.put(previous.fullPath(), previous.nestedTypeFilter());
                 }
             }
             previous = mapper;
         }
-        List<String> nestedPathNames = mappers.stream().map(NestedObjectMapper::name).toList();
+        List<String> nestedPathNames = mappers.stream().map(NestedObjectMapper::fullPath).toList();
         return new NestedLookup() {
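Sorting by full path is what lets the loop above detect nested parents with a single previous pointer: a parent sorts immediately before its own subtree, so a prefix check against the previous entry suffices. A standalone sketch of that invariant (the paths are made up):

import java.util.List;

class NestedParentSketch {
    public static void main(String[] args) {
        // Already sorted by full path, as in the diff.
        List<String> nestedPaths = List.of("book", "book.chapter", "book.chapter.note", "user");
        String previous = null;
        for (String current : nestedPaths) {
            // Same check as the diff: the previous entry is a parent if the
            // current path continues it past a dot.
            if (previous != null && current.startsWith(previous + ".")) {
                System.out.println(previous + " -> parent of " + current);
            }
            previous = current;
        }
        // book -> parent of book.chapter
        // book.chapter -> parent of book.chapter.note
    }
}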


@@ -305,7 +305,7 @@ public class NestedObjectMapper extends ObjectMapper {
     @Override
     public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) {
         if ((mergeWith instanceof NestedObjectMapper) == false) {
-            MapperErrors.throwNestedMappingConflictError(mergeWith.name());
+            MapperErrors.throwNestedMappingConflictError(mergeWith.fullPath());
         }
         NestedObjectMapper mergeWithObject = (NestedObjectMapper) mergeWith;
@@ -481,7 +481,7 @@ public class NestedObjectMapper extends ObjectMapper {
         @Override
         public String fieldName() {
-            return name();
+            return NestedObjectMapper.this.fullPath();
         }
     }
 }


@@ -1900,7 +1900,7 @@ public class NumberFieldMapper extends FieldMapper {
             context.addIgnoredField(mappedFieldType.name());
             if (storeMalformedFields) {
                 // Save a copy of the field so synthetic source can load it
-                context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+                context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
             }
             return;
         } else {
@@ -1979,9 +1979,9 @@ public class NumberFieldMapper extends FieldMapper {
     @Override
     public void doValidate(MappingLookup lookup) {
-        if (dimension && null != lookup.nestedLookup().getNestedParent(name())) {
+        if (dimension && null != lookup.nestedLookup().getNestedParent(fullPath())) {
             throw new IllegalArgumentException(
-                TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]"
+                TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + fullPath() + "]"
             );
         }
     }
@@ -1998,15 +1998,19 @@ public class NumberFieldMapper extends FieldMapper {
         }
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
-        return type.syntheticFieldLoader(name(), leafName(), ignoreMalformed.value());
+        return type.syntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value());
     }

     // For testing only:


@@ -433,7 +433,7 @@ public class ObjectMapper extends Mapper {
     }

     @Override
-    public String name() {
+    public String fullPath() {
         return this.fullPath;
     }
@@ -459,10 +459,6 @@ public class ObjectMapper extends Mapper {
         return mappers.values().iterator();
     }

-    public String fullPath() {
-        return this.fullPath;
-    }
-
     public final Dynamic dynamic() {
         return dynamic;
     }
@@ -492,11 +488,11 @@ public class ObjectMapper extends Mapper {
     @Override
     public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) {
         if (mergeWith instanceof ObjectMapper == false) {
-            MapperErrors.throwObjectMappingConflictError(mergeWith.name());
+            MapperErrors.throwObjectMappingConflictError(mergeWith.fullPath());
         }
         if (this instanceof NestedObjectMapper == false && mergeWith instanceof NestedObjectMapper) {
             // TODO stop NestedObjectMapper extending ObjectMapper?
-            MapperErrors.throwNestedMappingConflictError(mergeWith.name());
+            MapperErrors.throwNestedMappingConflictError(mergeWith.fullPath());
         }
         var mergeResult = MergeResult.build(this, (ObjectMapper) mergeWith, parentMergeContext);
         return new ObjectMapper(
@@ -524,7 +520,9 @@ public class ObjectMapper extends Mapper {
                 if (reason == MergeReason.INDEX_TEMPLATE) {
                     enabled = mergeWithObject.enabled;
                 } else if (existing.isEnabled() != mergeWithObject.isEnabled()) {
-                    throw new MapperException("the [enabled] parameter can't be updated for the object mapping [" + existing.name() + "]");
+                    throw new MapperException(
+                        "the [enabled] parameter can't be updated for the object mapping [" + existing.fullPath() + "]"
+                    );
                 } else {
                     enabled = existing.enabled;
                 }
@@ -537,7 +535,7 @@ public class ObjectMapper extends Mapper {
                     subObjects = mergeWithObject.subobjects;
                 } else if (existing.subobjects != mergeWithObject.subobjects) {
                     throw new MapperException(
-                        "the [subobjects] parameter can't be updated for the object mapping [" + existing.name() + "]"
+                        "the [subobjects] parameter can't be updated for the object mapping [" + existing.fullPath() + "]"
                     );
                 } else {
                     subObjects = existing.subobjects;
@@ -551,7 +549,7 @@ public class ObjectMapper extends Mapper {
                     trackArraySource = mergeWithObject.storeArraySource;
                 } else if (existing.storeArraySource != mergeWithObject.storeArraySource) {
                     throw new MapperException(
-                        "the [store_array_source] parameter can't be updated for the object mapping [" + existing.name() + "]"
+                        "the [store_array_source] parameter can't be updated for the object mapping [" + existing.fullPath() + "]"
                     );
                 } else {
                     trackArraySource = existing.storeArraySource;
@@ -606,9 +604,9 @@ public class ObjectMapper extends Mapper {
             } else {
                 assert mergeIntoMapper instanceof FieldMapper || mergeIntoMapper instanceof FieldAliasMapper;
                 if (mergeWithMapper instanceof NestedObjectMapper) {
-                    MapperErrors.throwNestedMappingConflictError(mergeWithMapper.name());
+                    MapperErrors.throwNestedMappingConflictError(mergeWithMapper.fullPath());
                 } else if (mergeWithMapper instanceof ObjectMapper) {
-                    MapperErrors.throwObjectMappingConflictError(mergeWithMapper.name());
+                    MapperErrors.throwObjectMappingConflictError(mergeWithMapper.fullPath());
                 }

                 // If we're merging template mappings when creating an index, then a field definition always
@@ -736,7 +734,7 @@ public class ObjectMapper extends Mapper {
     protected void serializeMappers(XContentBuilder builder, Params params) throws IOException {
         // sort the mappers so we get consistent serialization format
         Mapper[] sortedMappers = mappers.values().toArray(Mapper[]::new);
-        Arrays.sort(sortedMappers, Comparator.comparing(Mapper::name));
+        Arrays.sort(sortedMappers, Comparator.comparing(Mapper::fullPath));

         int count = 0;
         for (Mapper mapper : sortedMappers) {
@@ -757,7 +755,7 @@ public class ObjectMapper extends Mapper {
     }

     protected SourceLoader.SyntheticFieldLoader syntheticFieldLoader(Stream<Mapper> mappers, boolean isFragment) {
-        var fields = mappers.sorted(Comparator.comparing(Mapper::name))
+        var fields = mappers.sorted(Comparator.comparing(Mapper::fullPath))
             .map(Mapper::syntheticFieldLoader)
             .filter(l -> l != SourceLoader.SyntheticFieldLoader.NOTHING)
             .toList();
@@ -887,7 +885,7 @@ public class ObjectMapper extends Mapper {
             if (objectsWithIgnoredFields == null || objectsWithIgnoredFields.isEmpty()) {
                 return false;
             }
-            ignoredValues = objectsWithIgnoredFields.remove(name());
+            ignoredValues = objectsWithIgnoredFields.remove(ObjectMapper.this.fullPath());
             hasValue |= ignoredValues != null;
             for (SourceLoader.SyntheticFieldLoader loader : fields) {
                 hasValue |= loader.setIgnoredValues(objectsWithIgnoredFields);
@@ -897,7 +895,7 @@ public class ObjectMapper extends Mapper {
         @Override
         public String fieldName() {
-            return name();
+            return ObjectMapper.this.fullPath();
         }
     }
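Inside the anonymous loader class the new code uses the qualified form ObjectMapper.this.fullPath(), so the call unambiguously targets the enclosing mapper instance. A minimal demonstration of the mechanism (Outer and Runnable are stand-ins, not the real loader types):

class Outer {
    private final String fullPath = "obj.child";

    String fullPath() {
        return fullPath;
    }

    Runnable fieldNamePrinter() {
        return new Runnable() {
            @Override
            public void run() {
                // Qualified-this reaches the enclosing instance's accessor even if the
                // anonymous class or its supertype later declares a method of the same name.
                System.out.println(Outer.this.fullPath());
            }
        };
    }

    public static void main(String[] args) {
        new Outer().fieldNamePrinter().run(); // prints obj.child
    }
}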


@@ -142,7 +142,7 @@ public class PassThroughObjectMapper extends ObjectMapper {
     @Override
     public PassThroughObjectMapper merge(Mapper mergeWith, MapperMergeContext parentBuilderContext) {
         if (mergeWith instanceof PassThroughObjectMapper == false) {
-            MapperErrors.throwObjectMappingConflictError(mergeWith.name());
+            MapperErrors.throwObjectMappingConflictError(mergeWith.fullPath());
         }
         PassThroughObjectMapper mergeWithObject = (PassThroughObjectMapper) mergeWith;
@@ -216,11 +216,11 @@ public class PassThroughObjectMapper extends ObjectMapper {
     public static void checkForDuplicatePriorities(Collection<PassThroughObjectMapper> passThroughMappers) {
         Map<Integer, String> seen = new HashMap<>();
         for (PassThroughObjectMapper mapper : passThroughMappers) {
-            String conflict = seen.put(mapper.priority, mapper.name());
+            String conflict = seen.put(mapper.priority, mapper.fullPath());
             if (conflict != null) {
                 throw new MapperException(
                     "Pass-through object ["
-                        + mapper.name()
+                        + mapper.fullPath()
                         + "] has a conflicting param [priority="
                         + mapper.priority
                         + "] with object ["


@@ -390,7 +390,7 @@ public class RangeFieldMapper extends FieldMapper {
         }
         Range range = parseRange(parser);
-        context.doc().addAll(fieldType().rangeType.createFields(context, name(), range, index, hasDocValues, store));
+        context.doc().addAll(fieldType().rangeType.createFields(context, fullPath(), range, index, hasDocValues, store));

         if (hasDocValues == false && (index || store)) {
             context.addToFieldNames(fieldType().name());
@@ -406,7 +406,7 @@ public class RangeFieldMapper extends FieldMapper {
         if (start != XContentParser.Token.START_OBJECT) {
             throw new DocumentParsingException(
                 parser.getTokenLocation(),
-                "error parsing field [" + name() + "], expected an object but got " + parser.currentName()
+                "error parsing field [" + fullPath() + "], expected an object but got " + parser.currentName()
             );
         }
@@ -445,7 +445,7 @@ public class RangeFieldMapper extends FieldMapper {
                 } else {
                     throw new DocumentParsingException(
                         parser.getTokenLocation(),
-                        "error parsing field [" + name() + "], with unknown parameter [" + fieldName + "]"
+                        "error parsing field [" + fullPath() + "], with unknown parameter [" + fieldName + "]"
                     );
                 }
             }
@@ -471,15 +471,19 @@ public class RangeFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
-        return new BinaryDocValuesSyntheticFieldLoader(name()) {
+        return new BinaryDocValuesSyntheticFieldLoader(fullPath()) {
             @Override
             protected void writeValue(XContentBuilder b, BytesRef value) throws IOException {
                 List<Range> ranges = type.decodeRanges(value);


@@ -152,7 +152,7 @@ public class RootObjectMapper extends ObjectMapper {
     @Override
     public RootObjectMapper.Builder newBuilder(IndexVersion indexVersionCreated) {
-        RootObjectMapper.Builder builder = new RootObjectMapper.Builder(name(), subobjects);
+        RootObjectMapper.Builder builder = new RootObjectMapper.Builder(this.fullPath(), subobjects);
         builder.enabled = enabled;
         builder.dynamic = dynamic;
         return builder;
@@ -220,7 +220,7 @@ public class RootObjectMapper extends ObjectMapper {
     @Override
     public RootObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) {
         if (mergeWith instanceof RootObjectMapper == false) {
-            MapperErrors.throwObjectMappingConflictError(mergeWith.name());
+            MapperErrors.throwObjectMappingConflictError(mergeWith.fullPath());
         }
         RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith;


@@ -476,8 +476,8 @@ public final class TextFieldMapper extends FieldMapper {
             SubFieldInfo phraseFieldInfo = buildPhraseInfo(fieldType, tft);
             SubFieldInfo prefixFieldInfo = buildPrefixInfo(context, fieldType, tft);
             for (Mapper mapper : multiFields) {
-                if (mapper.name().endsWith(FAST_PHRASE_SUFFIX) || mapper.name().endsWith(FAST_PREFIX_SUFFIX)) {
-                    throw new MapperParsingException("Cannot use reserved field name [" + mapper.name() + "]");
+                if (mapper.fullPath().endsWith(FAST_PHRASE_SUFFIX) || mapper.fullPath().endsWith(FAST_PREFIX_SUFFIX)) {
+                    throw new MapperParsingException("Cannot use reserved field name [" + mapper.fullPath() + "]");
                 }
             }
             return new TextFieldMapper(leafName(), fieldType, tft, prefixFieldInfo, phraseFieldInfo, multiFields, copyTo, this);
@@ -1223,7 +1223,7 @@ public final class TextFieldMapper extends FieldMapper {
         assert mappedFieldType.getTextSearchInfo().isTokenized();
         assert mappedFieldType.hasDocValues() == false;
         if (fieldType.indexOptions() == IndexOptions.NONE && fieldType().fielddata()) {
-            throw new IllegalArgumentException("Cannot enable fielddata on a [text] field that is not indexed: [" + name() + "]");
+            throw new IllegalArgumentException("Cannot enable fielddata on a [text] field that is not indexed: [" + fullPath() + "]");
         }
         this.fieldType = freezeAndDeduplicateFieldType(fieldType);
         this.prefixFieldInfo = prefixFieldInfo;
@@ -1247,7 +1247,7 @@ public final class TextFieldMapper extends FieldMapper {
     @Override
     public Map<String, NamedAnalyzer> indexAnalyzers() {
         Map<String, NamedAnalyzer> analyzersMap = new HashMap<>();
-        analyzersMap.put(name(), indexAnalyzer);
+        analyzersMap.put(fullPath(), indexAnalyzer);
         if (phraseFieldInfo != null) {
             analyzersMap.put(
                 phraseFieldInfo.field,
@@ -1455,11 +1455,11 @@ public final class TextFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
         if (store) {
-            return new StringStoredFieldFieldLoader(name(), leafName(), null) {
+            return new StringStoredFieldFieldLoader(fullPath(), leafName(), null) {
                 @Override
                 protected void write(XContentBuilder b, Object value) throws IOException {
                     b.value((String) value);
@@ -1477,7 +1477,7 @@ public final class TextFieldMapper extends FieldMapper {
                 Locale.ROOT,
                 "field [%s] of type [%s] doesn't support synthetic source unless it is stored or has a sub-field of"
                     + " type [keyword] with doc values or stored and without a normalizer",
-                name(),
+                fullPath(),
                 typeName()
             )
         );


@@ -815,11 +815,11 @@ public final class FlattenedFieldMapper extends FieldMapper {
             return SourceLoader.SyntheticFieldLoader.NOTHING;
         }
         if (fieldType().hasDocValues()) {
-            return new FlattenedSortedSetDocValuesSyntheticFieldLoader(name() + "._keyed", leafName());
+            return new FlattenedSortedSetDocValuesSyntheticFieldLoader(fullPath() + "._keyed", leafName());
         }

         throw new IllegalArgumentException(
-            "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+            "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
         );
     }
 }


@@ -1641,7 +1641,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
         if (context.doc().getByKey(fieldType().name()) != null) {
             throw new IllegalArgumentException(
                 "Field ["
-                    + name()
+                    + fullPath()
                     + "] of type ["
                     + typeName()
                     + "] doesn't support indexing multiple values for the same field in the same document"
@@ -1715,7 +1715,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
                     "The ["
                         + typeName()
                         + "] field ["
-                        + name()
+                        + fullPath()
                         + "] in doc ["
                         + context.documentDescription()
                         + "] has more dimensions "
@@ -1732,7 +1732,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
                     "The ["
                         + typeName()
                         + "] field ["
-                        + name()
+                        + fullPath()
                         + "] in doc ["
                         + context.documentDescription()
                         + "] has a different number of dimensions "
@@ -1817,7 +1817,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
            );
         }
         if (fieldType().indexed) {
@@ -1848,10 +1848,10 @@ public class DenseVectorFieldMapper extends FieldMapper {
         @Override
         public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException {
-            values = leafReader.getFloatVectorValues(name());
+            values = leafReader.getFloatVectorValues(fullPath());
             if (values != null) {
                 if (indexCreatedVersion.onOrAfter(NORMALIZE_COSINE) && VectorSimilarity.COSINE.equals(vectorSimilarity)) {
-                    magnitudeReader = leafReader.getNumericDocValues(name() + COSINE_MAGNITUDE_FIELD_SUFFIX);
+                    magnitudeReader = leafReader.getNumericDocValues(fullPath() + COSINE_MAGNITUDE_FIELD_SUFFIX);
                 }
                 return docId -> {
                     hasValue = docId == values.advance(docId);
@@ -1859,7 +1859,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
                     return hasValue;
                 };
             }
-            byteVectorValues = leafReader.getByteVectorValues(name());
+            byteVectorValues = leafReader.getByteVectorValues(fullPath());
             if (byteVectorValues != null) {
                 return docId -> {
                     hasValue = docId == byteVectorValues.advance(docId);
@@ -1903,7 +1903,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
         @Override
         public String fieldName() {
-            return name();
+            return fullPath();
         }
     }
@@ -1923,7 +1923,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
         @Override
         public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException {
-            values = leafReader.getBinaryDocValues(name());
+            values = leafReader.getBinaryDocValues(fullPath());
             if (values == null) {
                 return null;
             }
@@ -1958,7 +1958,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
         @Override
         public String fieldName() {
-            return name();
+            return fullPath();
         }
     }
 }


@@ -186,14 +186,14 @@ public class SparseVectorFieldMapper extends FieldMapper {
             } else if (token == Token.VALUE_NULL) {
                 // ignore feature, this is consistent with numeric fields
             } else if (token == Token.VALUE_NUMBER || token == Token.VALUE_STRING) {
-                final String key = name() + "." + feature;
+                final String key = fullPath() + "." + feature;
                 float value = context.parser().floatValue(true);

                 // if we have an existing feature of the same name we'll select for the one with the max value
                 // based on recommendations from this paper: https://arxiv.org/pdf/2305.18494.pdf
                 IndexableField currentField = context.doc().getByKey(key);
                 if (currentField == null) {
-                    context.doc().addWithKey(key, new FeatureField(name(), feature, value));
+                    context.doc().addWithKey(key, new FeatureField(fullPath(), feature, value));
                 } else if (currentField instanceof FeatureField && ((FeatureField) currentField).getFeatureValue() < value) {
                     ((FeatureField) currentField).setFeatureValue(value);
                 }
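The branch above keeps the larger weight when the same feature token occurs twice in one document. The same selection rule in isolation (a plain map stands in for the Lucene document; names are illustrative):

import java.util.HashMap;
import java.util.Map;

class MaxFeatureWeightSketch {
    public static void main(String[] args) {
        Map<String, Float> featureWeights = new HashMap<>();
        // merge(..., Float::max) mirrors the "keep the existing field unless the
        // new value is larger" branch in the mapper.
        featureWeights.merge("my_vector.token", 1.5f, Float::max);
        featureWeights.merge("my_vector.token", 0.7f, Float::max);
        System.out.println(featureWeights.get("my_vector.token")); // 1.5
    }
}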


@@ -943,8 +943,8 @@ final class DefaultSearchContext extends SearchContext {
             // to the routing path.
             Set<String> matchingRoutingPaths = new TreeSet<>(routingPaths);
             for (Mapper mapper : indexService.mapperService().mappingLookup().fieldMappers()) {
-                if (mapper instanceof KeywordFieldMapper && indexRouting.matchesField(mapper.name())) {
-                    matchingRoutingPaths.add(mapper.name());
+                if (mapper instanceof KeywordFieldMapper && indexRouting.matchesField(mapper.fullPath())) {
+                    matchingRoutingPaths.add(mapper.fullPath());
                 }
             }
             routingPaths = new ArrayList<>(matchingRoutingPaths);
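The TreeSet in this loop both de-duplicates and keeps the expanded routing paths sorted before they are copied back into the list. A reduced sketch of that collection step (the startsWith predicate is a stand-in for indexRouting.matchesField, and the field names are made up):

import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;

class RoutingPathSketch {
    public static void main(String[] args) {
        List<String> routingPaths = List.of("host.*");
        Set<String> matching = new TreeSet<>(routingPaths);
        for (String mapperPath : List.of("host.name", "host.ip", "message")) {
            if (mapperPath.startsWith("host.")) { // stand-in for indexRouting.matchesField(...)
                matching.add(mapperPath);
            }
        }
        List<String> expanded = new ArrayList<>(matching);
        System.out.println(expanded); // [host.*, host.ip, host.name]
    }
}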


@@ -248,7 +248,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
         Mapper fieldMapper = defaultMapper.mappers().getMapper("field");
         ParsedDocument parsedDocument = defaultMapper.parse(source(b -> b.field("field", "suggestion")));
-        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
+        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
         assertFieldsOfType(fields);
     }
@@ -495,7 +495,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
         ParsedDocument parsedDocument = defaultMapper.parse(source(b -> b.array("field", "suggestion1", "suggestion2")));
-        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
+        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
         assertThat(fields, containsInAnyOrder(suggestField("suggestion1"), suggestField("suggestion2")));
     }
@@ -512,7 +512,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
             b.endObject();
         }));
-        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
+        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
         assertThat(fields, containsInAnyOrder(suggestField("suggestion")));
     }
@@ -529,7 +529,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
             b.endObject();
         }));
-        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
+        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
         assertThat(fields, containsInAnyOrder(suggestField("suggestion1"), suggestField("suggestion2"), suggestField("suggestion3")));
     }
@@ -566,7 +566,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
             b.endObject();
         }));
-        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
+        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
         assertFieldsOfType(fields);
     }
@@ -584,7 +584,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
             b.endArray();
         }));
-        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
+        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
         assertThat(fields, containsInAnyOrder(suggestField("suggestion1"), suggestField("suggestion2"), suggestField("suggestion3")));
     }
@@ -617,7 +617,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
             b.endArray();
         }));
-        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
+        List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
         assertThat(
             fields,
             containsInAnyOrder(


@@ -1568,7 +1568,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
             b.endObject();
         })).rootDoc();

-        assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay"));
+        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fullPath()), equalTo("shay"));
     }

     public void testParseToJsonAndParse() throws Exception {
@@ -1581,7 +1581,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
         LuceneDocument doc = builtDocMapper.parse(new SourceToParse("1", json, XContentType.JSON)).rootDoc();
         assertThat(doc.getBinaryValue(IdFieldMapper.NAME), equalTo(Uid.encodeId("1")));
-        assertThat(doc.get(builtDocMapper.mappers().getMapper("name.first").name()), equalTo("shay"));
+        assertThat(doc.get(builtDocMapper.mappers().getMapper("name.first").fullPath()), equalTo("shay"));
     }

     public void testSimpleParser() throws Exception {
@@ -1593,7 +1593,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
         LuceneDocument doc = docMapper.parse(new SourceToParse("1", json, XContentType.JSON)).rootDoc();
         assertThat(doc.getBinaryValue(IdFieldMapper.NAME), equalTo(Uid.encodeId("1")));
-        assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay"));
+        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fullPath()), equalTo("shay"));
     }

     public void testSimpleParserNoTypeNoId() throws Exception {
@@ -1602,7 +1602,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json"));
         LuceneDocument doc = docMapper.parse(new SourceToParse("1", json, XContentType.JSON)).rootDoc();
         assertThat(doc.getBinaryValue(IdFieldMapper.NAME), equalTo(Uid.encodeId("1")));
-        assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay"));
+        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fullPath()), equalTo("shay"));
     }

     public void testAttributes() throws Exception {
@@ -2605,7 +2605,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
         assertTrue(barMapper instanceof ObjectMapper);
         Mapper baz = ((ObjectMapper) barMapper).getMapper("baz");
         assertNotNull(baz);
-        assertEquals("foo.bar.baz", baz.name());
+        assertEquals("foo.bar.baz", baz.fullPath());
         assertEquals("baz", baz.leafName());
         List<IndexableField> fields = doc.rootDoc().getFields("foo.bar.baz");
         assertEquals(2, fields.size());
@@ -3245,7 +3245,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
         @Override
         public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
-            return new StringStoredFieldFieldLoader(name(), leafName(), null) {
+            return new StringStoredFieldFieldLoader(fullPath(), leafName(), null) {
                 @Override
                 protected void write(XContentBuilder b, Object value) throws IOException {
                     BytesRef ref = (BytesRef) value;


@@ -59,7 +59,7 @@ public class DynamicFieldsBuilderTests extends ESTestCase {
         DynamicFieldsBuilder.DYNAMIC_TRUE.createDynamicFieldFromValue(ctx, fieldname);
         List<Mapper> dynamicMappers = ctx.getDynamicMappers();
         assertEquals(1, dynamicMappers.size());
-        assertEquals(fieldname, dynamicMappers.get(0).name());
+        assertEquals(fieldname, dynamicMappers.get(0).fullPath());
         assertEquals(expectedType, dynamicMappers.get(0).typeName());
     }
@@ -90,7 +90,7 @@ public class DynamicFieldsBuilderTests extends ESTestCase {
         DynamicFieldsBuilder.DYNAMIC_TRUE.createDynamicFieldFromValue(ctx, "f1");
         List<Mapper> dynamicMappers = ctx.getDynamicMappers();
         assertEquals(1, dynamicMappers.size());
-        assertEquals("labels.f1", dynamicMappers.get(0).name());
+        assertEquals("labels.f1", dynamicMappers.get(0).fullPath());
         assertEquals("keyword", dynamicMappers.get(0).typeName());
     }
 }


@@ -1473,7 +1473,7 @@ public class MapperServiceTests extends MapperServiceTestCase {
         assertNotNull(mapper.mappers().getMapper("text_field"));
         FieldMapper.MultiFields multiFields = ((TextFieldMapper) mapper.mappers().getMapper("text_field")).multiFields();
         Map<String, FieldMapper> multiFieldMap = StreamSupport.stream(multiFields.spliterator(), false)
-            .collect(Collectors.toMap(FieldMapper::name, Function.identity()));
+            .collect(Collectors.toMap(FieldMapper::fullPath, Function.identity()));
         assertThat(multiFieldMap.keySet(), contains("text_field.multi_field1"));
         assertTrue(multiFieldMap.get("text_field.multi_field1").ignoreMalformed());
     }


@@ -101,7 +101,7 @@ public class MappingLookupInferenceFieldMapperTests extends MapperServiceTestCas
         @Override
         public InferenceFieldMetadata getMetadata(Set<String> sourcePaths) {
-            return new InferenceFieldMetadata(name(), INFERENCE_ID, sourcePaths.toArray(new String[0]));
+            return new InferenceFieldMetadata(fullPath(), INFERENCE_ID, sourcePaths.toArray(new String[0]));
         }

         @Override


@ -207,25 +207,25 @@ public class MappingParserTests extends MapperServiceTestCase {
Mapper object = mapping.getRoot().getMapper("obj"); Mapper object = mapping.getRoot().getMapper("obj");
assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class)); assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class));
assertEquals("obj", object.leafName()); assertEquals("obj", object.leafName());
assertEquals("obj", object.name()); assertEquals("obj", object.fullPath());
ObjectMapper objectMapper = (ObjectMapper) object; ObjectMapper objectMapper = (ObjectMapper) object;
assertEquals(1, objectMapper.mappers.size()); assertEquals(1, objectMapper.mappers.size());
object = objectMapper.getMapper("source"); object = objectMapper.getMapper("source");
assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class)); assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class));
assertEquals("source", object.leafName()); assertEquals("source", object.leafName());
assertEquals("obj.source", object.name()); assertEquals("obj.source", object.fullPath());
objectMapper = (ObjectMapper) object; objectMapper = (ObjectMapper) object;
assertEquals(1, objectMapper.mappers.size()); assertEquals(1, objectMapper.mappers.size());
object = objectMapper.getMapper("geo"); object = objectMapper.getMapper("geo");
assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class)); assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class));
assertEquals("geo", object.leafName()); assertEquals("geo", object.leafName());
assertEquals("obj.source.geo", object.name()); assertEquals("obj.source.geo", object.fullPath());
objectMapper = (ObjectMapper) object; objectMapper = (ObjectMapper) object;
assertEquals(1, objectMapper.mappers.size()); assertEquals(1, objectMapper.mappers.size());
Mapper location = objectMapper.getMapper("location"); Mapper location = objectMapper.getMapper("location");
assertThat(location, CoreMatchers.instanceOf(GeoPointFieldMapper.class)); assertThat(location, CoreMatchers.instanceOf(GeoPointFieldMapper.class));
GeoPointFieldMapper geoPointFieldMapper = (GeoPointFieldMapper) location; GeoPointFieldMapper geoPointFieldMapper = (GeoPointFieldMapper) location;
assertEquals("obj.source.geo.location", geoPointFieldMapper.name()); assertEquals("obj.source.geo.location", geoPointFieldMapper.fullPath());
assertEquals("location", geoPointFieldMapper.leafName()); assertEquals("location", geoPointFieldMapper.leafName());
assertEquals("obj.source.geo.location", geoPointFieldMapper.mappedFieldType.name()); assertEquals("obj.source.geo.location", geoPointFieldMapper.mappedFieldType.name());
} }
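The final two assertions state the invariant this rename makes obvious: a mapper's fullPath() now agrees with its MappedFieldType's name(), and leafName() is just the last dotted segment. A hypothetical helper (not part of the commit) that states it once for any FieldMapper:

    static void assertNamesAligned(FieldMapper mapper) {
        assertEquals(mapper.fullPath(), mapper.fieldType().name()); // fullPath and field-type name agree
        assertTrue(mapper.fullPath().endsWith(mapper.leafName())); // leaf name is the trailing segment
    }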

View file

@ -222,7 +222,7 @@ public class MultiFieldTests extends MapperServiceTestCase {
Mapper mapper = mapperService.mappingLookup().getMapper("field"); Mapper mapper = mapperService.mappingLookup().getMapper("field");
assertThat(mapper, instanceOf(FieldMapper.class)); assertThat(mapper, instanceOf(FieldMapper.class));
final Set<String> fieldsUsingSourcePath = new HashSet<>(); final Set<String> fieldsUsingSourcePath = new HashSet<>();
((FieldMapper) mapper).sourcePathUsedBy().forEachRemaining(mapper1 -> fieldsUsingSourcePath.add(mapper1.name())); ((FieldMapper) mapper).sourcePathUsedBy().forEachRemaining(mapper1 -> fieldsUsingSourcePath.add(mapper1.fullPath()));
assertThat(fieldsUsingSourcePath, equalTo(Set.of("field.subfield1", "field.subfield2"))); assertThat(fieldsUsingSourcePath, equalTo(Set.of("field.subfield1", "field.subfield2")));
assertThat(mapperService.mappingLookup().sourcePaths("field.subfield1"), equalTo(Set.of("field"))); assertThat(mapperService.mappingLookup().sourcePaths("field.subfield1"), equalTo(Set.of("field")));

View file

@ -54,10 +54,10 @@ public final class ObjectMapperMergeTests extends ESTestCase {
{ {
Mapper bar = mergedFoo.getMapper("bar"); Mapper bar = mergedFoo.getMapper("bar");
assertEquals("bar", bar.leafName()); assertEquals("bar", bar.leafName());
assertEquals("foo.bar", bar.name()); assertEquals("foo.bar", bar.fullPath());
Mapper baz = mergedFoo.getMapper("baz"); Mapper baz = mergedFoo.getMapper("baz");
assertEquals("baz", baz.leafName()); assertEquals("baz", baz.leafName());
assertEquals("foo.baz", baz.name()); assertEquals("foo.baz", baz.fullPath());
} }
} }
@ -139,7 +139,7 @@ public final class ObjectMapperMergeTests extends ESTestCase {
final ObjectMapper merged = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, MAPPING_UPDATE, Long.MAX_VALUE)); final ObjectMapper merged = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, MAPPING_UPDATE, Long.MAX_VALUE));
final KeywordFieldMapper keywordFieldMapper = (KeywordFieldMapper) merged.getMapper("host.name"); final KeywordFieldMapper keywordFieldMapper = (KeywordFieldMapper) merged.getMapper("host.name");
assertEquals("host.name", keywordFieldMapper.name()); assertEquals("host.name", keywordFieldMapper.fullPath());
assertEquals("host.name", keywordFieldMapper.leafName()); assertEquals("host.name", keywordFieldMapper.leafName());
} }
@ -156,7 +156,7 @@ public final class ObjectMapperMergeTests extends ESTestCase {
ObjectMapper foo = (ObjectMapper) merged.getMapper("foo"); ObjectMapper foo = (ObjectMapper) merged.getMapper("foo");
ObjectMapper metrics = (ObjectMapper) foo.getMapper("metrics"); ObjectMapper metrics = (ObjectMapper) foo.getMapper("metrics");
final KeywordFieldMapper keywordFieldMapper = (KeywordFieldMapper) metrics.getMapper("host.name"); final KeywordFieldMapper keywordFieldMapper = (KeywordFieldMapper) metrics.getMapper("host.name");
assertEquals("foo.metrics.host.name", keywordFieldMapper.name()); assertEquals("foo.metrics.host.name", keywordFieldMapper.fullPath());
assertEquals("host.name", keywordFieldMapper.leafName()); assertEquals("host.name", keywordFieldMapper.leafName());
} }
@ -169,10 +169,10 @@ public final class ObjectMapperMergeTests extends ESTestCase {
final ObjectMapper merged = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, MAPPING_UPDATE, Long.MAX_VALUE)); final ObjectMapper merged = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, MAPPING_UPDATE, Long.MAX_VALUE));
TextFieldMapper text = (TextFieldMapper) merged.getMapper("text"); TextFieldMapper text = (TextFieldMapper) merged.getMapper("text");
assertEquals("text", text.name()); assertEquals("text", text.fullPath());
assertEquals("text", text.leafName()); assertEquals("text", text.leafName());
KeywordFieldMapper keyword = (KeywordFieldMapper) text.multiFields().iterator().next(); KeywordFieldMapper keyword = (KeywordFieldMapper) text.multiFields().iterator().next();
assertEquals("text.keyword", keyword.name()); assertEquals("text.keyword", keyword.fullPath());
assertEquals("keyword", keyword.leafName()); assertEquals("keyword", keyword.leafName());
} }
@ -189,10 +189,10 @@ public final class ObjectMapperMergeTests extends ESTestCase {
ObjectMapper foo = (ObjectMapper) merged.getMapper("foo"); ObjectMapper foo = (ObjectMapper) merged.getMapper("foo");
ObjectMapper metrics = (ObjectMapper) foo.getMapper("metrics"); ObjectMapper metrics = (ObjectMapper) foo.getMapper("metrics");
final TextFieldMapper textFieldMapper = (TextFieldMapper) metrics.getMapper("host.name"); final TextFieldMapper textFieldMapper = (TextFieldMapper) metrics.getMapper("host.name");
assertEquals("foo.metrics.host.name", textFieldMapper.name()); assertEquals("foo.metrics.host.name", textFieldMapper.fullPath());
assertEquals("host.name", textFieldMapper.leafName()); assertEquals("host.name", textFieldMapper.leafName());
FieldMapper fieldMapper = textFieldMapper.multiFields.iterator().next(); FieldMapper fieldMapper = textFieldMapper.multiFields.iterator().next();
assertEquals("foo.metrics.host.name.keyword", fieldMapper.name()); assertEquals("foo.metrics.host.name.keyword", fieldMapper.fullPath());
assertEquals("keyword", fieldMapper.leafName()); assertEquals("keyword", fieldMapper.leafName());
} }
@ -325,7 +325,7 @@ public final class ObjectMapperMergeTests extends ESTestCase {
FieldMapper.Builder fieldBuilder = new KeywordFieldMapper.Builder("host.name", IndexVersion.current()); FieldMapper.Builder fieldBuilder = new KeywordFieldMapper.Builder("host.name", IndexVersion.current());
FieldMapper fieldMapper = fieldBuilder.build(MapperBuilderContext.root(false, false)); FieldMapper fieldMapper = fieldBuilder.build(MapperBuilderContext.root(false, false));
assertEquals("host.name", fieldMapper.leafName()); assertEquals("host.name", fieldMapper.leafName());
assertEquals("host.name", fieldMapper.name()); assertEquals("host.name", fieldMapper.fullPath());
return new RootObjectMapper.Builder("_doc", Explicit.EXPLICIT_FALSE).add(fieldBuilder) return new RootObjectMapper.Builder("_doc", Explicit.EXPLICIT_FALSE).add(fieldBuilder)
.build(MapperBuilderContext.root(false, false)); .build(MapperBuilderContext.root(false, false));
} }
@ -343,7 +343,7 @@ public final class ObjectMapperMergeTests extends ESTestCase {
) )
); );
assertEquals("host.name", fieldMapper.leafName()); assertEquals("host.name", fieldMapper.leafName());
assertEquals("foo.metrics.host.name", fieldMapper.name()); assertEquals("foo.metrics.host.name", fieldMapper.fullPath());
return new ObjectMapper.Builder("foo", ObjectMapper.Defaults.SUBOBJECTS).add( return new ObjectMapper.Builder("foo", ObjectMapper.Defaults.SUBOBJECTS).add(
new ObjectMapper.Builder("metrics", Explicit.EXPLICIT_FALSE).add(fieldBuilder) new ObjectMapper.Builder("metrics", Explicit.EXPLICIT_FALSE).add(fieldBuilder)
); );
@ -362,10 +362,10 @@ public final class ObjectMapperMergeTests extends ESTestCase {
) )
); );
assertEquals("host.name", textKeywordMultiField.leafName()); assertEquals("host.name", textKeywordMultiField.leafName());
assertEquals("foo.metrics.host.name", textKeywordMultiField.name()); assertEquals("foo.metrics.host.name", textKeywordMultiField.fullPath());
FieldMapper fieldMapper = textKeywordMultiField.multiFields.iterator().next(); FieldMapper fieldMapper = textKeywordMultiField.multiFields.iterator().next();
assertEquals("keyword", fieldMapper.leafName()); assertEquals("keyword", fieldMapper.leafName());
assertEquals("foo.metrics.host.name.keyword", fieldMapper.name()); assertEquals("foo.metrics.host.name.keyword", fieldMapper.fullPath());
return new ObjectMapper.Builder("foo", ObjectMapper.Defaults.SUBOBJECTS).add( return new ObjectMapper.Builder("foo", ObjectMapper.Defaults.SUBOBJECTS).add(
new ObjectMapper.Builder("metrics", Explicit.EXPLICIT_FALSE).add(fieldBuilder) new ObjectMapper.Builder("metrics", Explicit.EXPLICIT_FALSE).add(fieldBuilder)
); );
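These builders show where the full path comes from: it is fixed when the builder is built against a MapperBuilderContext, so the same leaf builder yields "host.name" at the root but "foo.metrics.host.name" under nested object builders. A sketch of the root case, using only the calls that appear above:

    FieldMapper.Builder b = new KeywordFieldMapper.Builder("host.name", IndexVersion.current());
    FieldMapper atRoot = b.build(MapperBuilderContext.root(false, false));
    assertEquals("host.name", atRoot.leafName()); // root context: leaf name and
    assertEquals("host.name", atRoot.fullPath()); // full path coincide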

View file

@ -607,7 +607,7 @@ public class ObjectMapperTests extends MapperServiceTestCase {
new KeywordFieldMapper.Builder("keyword2", IndexVersion.current()) new KeywordFieldMapper.Builder("keyword2", IndexVersion.current())
) )
).add(new KeywordFieldMapper.Builder("keyword1", IndexVersion.current())).build(rootContext); ).add(new KeywordFieldMapper.Builder("keyword1", IndexVersion.current())).build(rootContext);
List<String> fields = objectMapper.asFlattenedFieldMappers(rootContext).stream().map(FieldMapper::name).toList(); List<String> fields = objectMapper.asFlattenedFieldMappers(rootContext).stream().map(FieldMapper::fullPath).toList();
assertThat(fields, containsInAnyOrder("parent.keyword1", "parent.child.keyword2")); assertThat(fields, containsInAnyOrder("parent.keyword1", "parent.child.keyword2"));
} }
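asFlattenedFieldMappers collapses the object tree into its leaf field mappers, and the assertion relies on fullPath() being the dotted concatenation of the enclosing object names, which is what makes the flattened names unambiguous. Condensed from the test above:

    List<String> fields = objectMapper.asFlattenedFieldMappers(rootContext)
        .stream()
        .map(FieldMapper::fullPath)
        .toList();
    // expected, in any order: "parent.keyword1", "parent.child.keyword2"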

View file

@ -112,7 +112,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
XContentType.JSON XContentType.JSON
) )
); );
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 7); assertContextSuggestFields(fields, 7);
} }
@ -156,7 +156,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
XContentType.JSON XContentType.JSON
) )
); );
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 3); assertContextSuggestFields(fields, 3);
} }
@ -200,7 +200,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
XContentType.JSON XContentType.JSON
) )
); );
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 3); assertContextSuggestFields(fields, 3);
} }
@ -244,7 +244,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
XContentType.JSON XContentType.JSON
) )
); );
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 3); assertContextSuggestFields(fields, 3);
} }
@ -328,7 +328,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
XContentType.JSON XContentType.JSON
) )
); );
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 3); assertContextSuggestFields(fields, 3);
} }
@ -370,7 +370,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
XContentType.JSON XContentType.JSON
) )
); );
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 3); assertContextSuggestFields(fields, 3);
} }
@ -449,7 +449,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
.endArray() .endArray()
.endObject(); .endObject();
ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("1", BytesReference.bytes(builder), XContentType.JSON)); ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("1", BytesReference.bytes(builder), XContentType.JSON));
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 3); assertContextSuggestFields(fields, 3);
} }
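Every variant of this test retrieves the indexed context-suggest fields the same way, so the lookup key is the point: LuceneDocument#getFields is queried with the completion field's full path. The repeated pattern, condensed:

    ParsedDocument doc = defaultMapper.parse(new SourceToParse("1", BytesReference.bytes(builder), XContentType.JSON));
    List<IndexableField> fields = doc.rootDoc().getFields(fieldMapper.fullPath());
    assertContextSuggestFields(fields, 3); // expected field count varies per test case (7 in the first)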

View file

@ -1039,13 +1039,13 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
return "All fields that match routing_path must be configured with [time_series_dimension: true] " return "All fields that match routing_path must be configured with [time_series_dimension: true] "
+ "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "or flattened fields with a list of dimensions in [time_series_dimensions] and "
+ "without the [script] parameter. [" + "without the [script] parameter. ["
+ mapper.name() + mapper.fullPath()
+ "] was not a dimension."; + "] was not a dimension.";
} }
return "All fields that match routing_path must be configured with [time_series_dimension: true] " return "All fields that match routing_path must be configured with [time_series_dimension: true] "
+ "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "or flattened fields with a list of dimensions in [time_series_dimensions] and "
+ "without the [script] parameter. [" + "without the [script] parameter. ["
+ mapper.name() + mapper.fullPath()
+ "] was [" + "] was ["
+ mapper.typeName() + mapper.typeName()
+ "]."; + "].";

View file

@ -334,7 +334,7 @@ public class HistogramFieldMapper extends FieldMapper {
throw new DocumentParsingException( throw new DocumentParsingException(
subParser.getTokenLocation(), subParser.getTokenLocation(),
"error parsing field [" "error parsing field ["
+ name() + fullPath()
+ "], [" + "], ["
+ VALUES_FIELD + VALUES_FIELD
+ "] values must be in increasing order, got [" + "] values must be in increasing order, got ["
@ -363,7 +363,7 @@ public class HistogramFieldMapper extends FieldMapper {
} else { } else {
throw new DocumentParsingException( throw new DocumentParsingException(
subParser.getTokenLocation(), subParser.getTokenLocation(),
"error parsing field [" + name() + "], with unknown parameter [" + fieldName + "]" "error parsing field [" + fullPath() + "], with unknown parameter [" + fieldName + "]"
); );
} }
token = subParser.nextToken(); token = subParser.nextToken();
@ -371,20 +371,20 @@ public class HistogramFieldMapper extends FieldMapper {
if (values == null) { if (values == null) {
throw new DocumentParsingException( throw new DocumentParsingException(
subParser.getTokenLocation(), subParser.getTokenLocation(),
"error parsing field [" + name() + "], expected field called [" + VALUES_FIELD.getPreferredName() + "]" "error parsing field [" + fullPath() + "], expected field called [" + VALUES_FIELD.getPreferredName() + "]"
); );
} }
if (counts == null) { if (counts == null) {
throw new DocumentParsingException( throw new DocumentParsingException(
subParser.getTokenLocation(), subParser.getTokenLocation(),
"error parsing field [" + name() + "], expected field called [" + COUNTS_FIELD.getPreferredName() + "]" "error parsing field [" + fullPath() + "], expected field called [" + COUNTS_FIELD.getPreferredName() + "]"
); );
} }
if (values.size() != counts.size()) { if (values.size() != counts.size()) {
throw new DocumentParsingException( throw new DocumentParsingException(
subParser.getTokenLocation(), subParser.getTokenLocation(),
"error parsing field [" "error parsing field ["
+ name() + fullPath()
+ "], expected same length from [" + "], expected same length from ["
+ VALUES_FIELD.getPreferredName() + VALUES_FIELD.getPreferredName()
+ "] and " + "] and "
@ -403,7 +403,7 @@ public class HistogramFieldMapper extends FieldMapper {
if (count < 0) { if (count < 0) {
throw new DocumentParsingException( throw new DocumentParsingException(
subParser.getTokenLocation(), subParser.getTokenLocation(),
"error parsing field [" + name() + "], [" + COUNTS_FIELD + "] elements must be >= 0 but got " + counts.get(i) "error parsing field [" + fullPath() + "], [" + COUNTS_FIELD + "] elements must be >= 0 but got " + counts.get(i)
); );
} else if (count > 0) { } else if (count > 0) {
// we do not add elements with count == 0 // we do not add elements with count == 0
@ -416,11 +416,11 @@ public class HistogramFieldMapper extends FieldMapper {
} }
} }
BytesRef docValue = streamOutput.bytes().toBytesRef(); BytesRef docValue = streamOutput.bytes().toBytesRef();
Field field = new BinaryDocValuesField(name(), docValue); Field field = new BinaryDocValuesField(fullPath(), docValue);
if (context.doc().getByKey(fieldType().name()) != null) { if (context.doc().getByKey(fieldType().name()) != null) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"Field [" "Field ["
+ name() + fullPath()
+ "] of type [" + "] of type ["
+ typeName() + typeName()
+ "] doesn't support indexing multiple values for the same field in the same document" + "] doesn't support indexing multiple values for the same field in the same document"
@ -448,7 +448,7 @@ public class HistogramFieldMapper extends FieldMapper {
} }
if (malformedDataForSyntheticSource != null) { if (malformedDataForSyntheticSource != null) {
context.doc().add(IgnoreMalformedStoredValues.storedField(name(), malformedDataForSyntheticSource)); context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), malformedDataForSyntheticSource));
} }
context.addIgnoredField(fieldType().name()); context.addIgnoredField(fieldType().name());
@ -516,15 +516,15 @@ public class HistogramFieldMapper extends FieldMapper {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
if (copyTo.copyToFields().isEmpty() != true) { if (copyTo.copyToFields().isEmpty() != true) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"field [" + name() + "] of type [histogram] doesn't support synthetic source because it declares copy_to" "field [" + fullPath() + "] of type [histogram] doesn't support synthetic source because it declares copy_to"
); );
} }
return new CompositeSyntheticFieldLoader( return new CompositeSyntheticFieldLoader(
leafName(), leafName(),
name(), fullPath(),
new HistogramSyntheticFieldLoader(), new HistogramSyntheticFieldLoader(),
new CompositeSyntheticFieldLoader.MalformedValuesLayer(name()) new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath())
); );
} }
@ -587,7 +587,7 @@ public class HistogramFieldMapper extends FieldMapper {
@Override @Override
public String fieldName() { public String fieldName() {
return name(); return fullPath();
} }
@Override @Override
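The histogram loader shows the post-rename convention for synthetic-source plumbing: the loader is keyed by leafName() for placement inside the parent object, while stored-field and ignored-field lookups use fullPath(). The constructor call from the hunk, annotated with my reading of each argument:

    return new CompositeSyntheticFieldLoader(
        leafName(),                                                        // key within the enclosing object
        fullPath(),                                                        // dotted name for stored/ignored fields
        new HistogramSyntheticFieldLoader(),                               // doc-values-backed layer
        new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath()) // replays ignore_malformed originals
    );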

View file

@ -197,7 +197,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie
boolean isWithinLeaf = context.path().isWithinLeafObject(); boolean isWithinLeaf = context.path().isWithinLeafObject();
try { try {
context.path().setWithinLeafObject(true); context.path().setWithinLeafObject(true);
field = SemanticTextField.parse(parser, new Tuple<>(name(), context.parser().contentType())); field = SemanticTextField.parse(parser, new Tuple<>(fullPath(), context.parser().contentType()));
} finally { } finally {
context.path().setWithinLeafObject(isWithinLeaf); context.path().setWithinLeafObject(isWithinLeaf);
} }
@ -242,7 +242,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie
throw new DocumentParsingException( throw new DocumentParsingException(
xContentLocation, xContentLocation,
"Incompatible model settings for field [" "Incompatible model settings for field ["
+ name() + fullPath()
+ "]. Check that the " + "]. Check that the "
+ INFERENCE_ID_FIELD + INFERENCE_ID_FIELD
+ " is not using different model settings", + " is not using different model settings",
@ -284,12 +284,12 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie
String[] copyFields = sourcePaths.toArray(String[]::new); String[] copyFields = sourcePaths.toArray(String[]::new);
// ensure consistent order // ensure consistent order
Arrays.sort(copyFields); Arrays.sort(copyFields);
return new InferenceFieldMetadata(name(), fieldType().inferenceId, copyFields); return new InferenceFieldMetadata(fullPath(), fieldType().inferenceId, copyFields);
} }
@Override @Override
public Object getOriginalValue(Map<String, Object> sourceAsMap) { public Object getOriginalValue(Map<String, Object> sourceAsMap) {
Object fieldValue = sourceAsMap.get(name()); Object fieldValue = sourceAsMap.get(fullPath());
if (fieldValue == null) { if (fieldValue == null) {
return null; return null;
} else if (fieldValue instanceof Map<?, ?> == false) { } else if (fieldValue instanceof Map<?, ?> == false) {
@ -297,7 +297,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie
return fieldValue; return fieldValue;
} }
Map<String, Object> fieldValueMap = XContentMapValues.nodeMapValue(fieldValue, "Field [" + name() + "]"); Map<String, Object> fieldValueMap = XContentMapValues.nodeMapValue(fieldValue, "Field [" + fullPath() + "]");
return XContentMapValues.extractValue(TEXT_FIELD, fieldValueMap); return XContentMapValues.extractValue(TEXT_FIELD, fieldValueMap);
} }
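getOriginalValue digs the pre-inference text back out of _source: the field's value in the source map is itself an object, and the raw text sits under the "text" sub-key (TEXT_FIELD in the mapper). A hedged sketch of the same lookup with a concrete, hypothetical field name:

    Object raw = sourceAsMap.get("my_semantic_field"); // hypothetical top-level semantic_text field
    if (raw instanceof Map<?, ?>) {
        Map<String, Object> valueMap = XContentMapValues.nodeMapValue(raw, "Field [my_semantic_field]");
        Object originalText = XContentMapValues.extractValue("text", valueMap);
    }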

View file

@ -314,7 +314,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase {
.getNestedMappers() .getNestedMappers()
.get(getChunksFieldName(fieldName)); .get(getChunksFieldName(fieldName));
assertThat(chunksMapper, equalTo(semanticFieldMapper.fieldType().getChunksField())); assertThat(chunksMapper, equalTo(semanticFieldMapper.fieldType().getChunksField()));
assertThat(chunksMapper.name(), equalTo(getChunksFieldName(fieldName))); assertThat(chunksMapper.fullPath(), equalTo(getChunksFieldName(fieldName)));
Mapper textMapper = chunksMapper.getMapper(CHUNKED_TEXT_FIELD); Mapper textMapper = chunksMapper.getMapper(CHUNKED_TEXT_FIELD);
assertNotNull(textMapper); assertNotNull(textMapper);
assertThat(textMapper, instanceOf(KeywordFieldMapper.class)); assertThat(textMapper, instanceOf(KeywordFieldMapper.class));
@ -328,7 +328,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase {
assertThat(embeddingsMapper, instanceOf(FieldMapper.class)); assertThat(embeddingsMapper, instanceOf(FieldMapper.class));
FieldMapper embeddingsFieldMapper = (FieldMapper) embeddingsMapper; FieldMapper embeddingsFieldMapper = (FieldMapper) embeddingsMapper;
assertTrue(embeddingsFieldMapper.fieldType() == mapperService.mappingLookup().getFieldType(getEmbeddingsFieldName(fieldName))); assertTrue(embeddingsFieldMapper.fieldType() == mapperService.mappingLookup().getFieldType(getEmbeddingsFieldName(fieldName)));
assertThat(embeddingsMapper.name(), equalTo(getEmbeddingsFieldName(fieldName))); assertThat(embeddingsMapper.fullPath(), equalTo(getEmbeddingsFieldName(fieldName)));
switch (semanticFieldMapper.fieldType().getModelSettings().taskType()) { switch (semanticFieldMapper.fieldType().getModelSettings().taskType()) {
case SPARSE_EMBEDDING -> assertThat(embeddingsMapper, instanceOf(SparseVectorFieldMapper.class)); case SPARSE_EMBEDDING -> assertThat(embeddingsMapper, instanceOf(SparseVectorFieldMapper.class));
case TEXT_EMBEDDING -> assertThat(embeddingsMapper, instanceOf(DenseVectorFieldMapper.class)); case TEXT_EMBEDDING -> assertThat(embeddingsMapper, instanceOf(DenseVectorFieldMapper.class));

View file

@ -665,7 +665,7 @@ public class AggregateDoubleMetricFieldMapper extends FieldMapper {
if (context.doc().getByKey(delegateFieldMapper.fieldType().name()) != null) { if (context.doc().getByKey(delegateFieldMapper.fieldType().name()) != null) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"Field [" "Field ["
+ name() + fullPath()
+ "] of type [" + "] of type ["
+ typeName() + typeName()
+ "] does not support indexing multiple values for the same field in the same document" + "] does not support indexing multiple values for the same field in the same document"
@ -687,10 +687,10 @@ public class AggregateDoubleMetricFieldMapper extends FieldMapper {
} }
if (malformedDataForSyntheticSource != null) { if (malformedDataForSyntheticSource != null) {
context.doc().add(IgnoreMalformedStoredValues.storedField(name(), malformedDataForSyntheticSource)); context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), malformedDataForSyntheticSource));
} }
context.addIgnoredField(name()); context.addIgnoredField(fullPath());
context.path().remove(); context.path().remove();
return; return;
} }
@ -719,9 +719,9 @@ public class AggregateDoubleMetricFieldMapper extends FieldMapper {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
return new CompositeSyntheticFieldLoader( return new CompositeSyntheticFieldLoader(
leafName(), leafName(),
name(), fullPath(),
new AggregateMetricSyntheticFieldLoader(name(), leafName(), metrics), new AggregateMetricSyntheticFieldLoader(fullPath(), leafName(), metrics),
new CompositeSyntheticFieldLoader.MalformedValuesLayer(name()) new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath())
); );
} }

View file

@ -316,7 +316,7 @@ public class ConstantKeywordFieldMapper extends FieldMapper {
final String value = parser.textOrNull(); final String value = parser.textOrNull();
if (value == null) { if (value == null) {
throw new IllegalArgumentException("[constant_keyword] field [" + name() + "] doesn't accept [null] values"); throw new IllegalArgumentException("[constant_keyword] field [" + fullPath() + "] doesn't accept [null] values");
} }
if (fieldType().value == null) { if (fieldType().value == null) {
@ -328,7 +328,7 @@ public class ConstantKeywordFieldMapper extends FieldMapper {
} else if (Objects.equals(fieldType().value, value) == false) { } else if (Objects.equals(fieldType().value, value) == false) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"[constant_keyword] field [" "[constant_keyword] field ["
+ name() + fullPath()
+ "] only accepts values that are equal to the value defined in the mappings [" + "] only accepts values that are equal to the value defined in the mappings ["
+ fieldType().value() + fieldType().value()
+ "], but got [" + "], but got ["
@ -380,7 +380,7 @@ public class ConstantKeywordFieldMapper extends FieldMapper {
@Override @Override
public String fieldName() { public String fieldName() {
return name(); return fullPath();
} }
}; };
} }

View file

@ -354,12 +354,12 @@ public class CountedKeywordFieldMapper extends FieldMapper {
int i = 0; int i = 0;
int[] counts = new int[values.size()]; int[] counts = new int[values.size()];
for (Map.Entry<String, Integer> value : values.entrySet()) { for (Map.Entry<String, Integer> value : values.entrySet()) {
context.doc().add(new KeywordFieldMapper.KeywordField(name(), new BytesRef(value.getKey()), fieldType)); context.doc().add(new KeywordFieldMapper.KeywordField(fullPath(), new BytesRef(value.getKey()), fieldType));
counts[i++] = value.getValue(); counts[i++] = value.getValue();
} }
BytesStreamOutput streamOutput = new BytesStreamOutput(); BytesStreamOutput streamOutput = new BytesStreamOutput();
streamOutput.writeVIntArray(counts); streamOutput.writeVIntArray(counts);
context.doc().add(new BinaryDocValuesField(countFieldMapper.name(), streamOutput.bytes().toBytesRef())); context.doc().add(new BinaryDocValuesField(countFieldMapper.fullPath(), streamOutput.bytes().toBytesRef()));
} }
private void parseArray(DocumentParserContext context, SortedMap<String, Integer> values) throws IOException { private void parseArray(DocumentParserContext context, SortedMap<String, Integer> values) throws IOException {
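The keyword values themselves are indexed under the field's full path, while their per-value counts travel in a parallel binary doc-values field owned by countFieldMapper. The count encoding is just a v-int array; a round-trip sketch (assuming the matching StreamInput#readVIntArray on the read path):

    BytesStreamOutput out = new BytesStreamOutput();
    out.writeVIntArray(new int[] { 3, 1, 2 });                // counts, in sorted-value order
    int[] counts = out.bytes().streamInput().readVIntArray(); // decode side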

View file

@ -636,7 +636,7 @@ public class UnsignedLongFieldMapper extends FieldMapper {
context.addIgnoredField(mappedFieldType.name()); context.addIgnoredField(mappedFieldType.name());
if (isSourceSynthetic) { if (isSourceSynthetic) {
// Save a copy of the field so synthetic source can load it // Save a copy of the field so synthetic source can load it
context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser())); context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
} }
return; return;
} else { } else {
@ -753,9 +753,9 @@ public class UnsignedLongFieldMapper extends FieldMapper {
@Override @Override
public void doValidate(MappingLookup lookup) { public void doValidate(MappingLookup lookup) {
if (dimension && null != lookup.nestedLookup().getNestedParent(name())) { if (dimension && null != lookup.nestedLookup().getNestedParent(fullPath())) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]" TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + fullPath() + "]"
); );
} }
} }
@ -769,15 +769,19 @@ public class UnsignedLongFieldMapper extends FieldMapper {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
if (hasDocValues == false) { if (hasDocValues == false) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values" "field ["
+ fullPath()
+ "] of type ["
+ typeName()
+ "] doesn't support synthetic source because it doesn't have doc values"
); );
} }
if (copyTo.copyToFields().isEmpty() != true) { if (copyTo.copyToFields().isEmpty() != true) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to" "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
); );
} }
return new SortedNumericDocValuesSyntheticFieldLoader(name(), leafName(), ignoreMalformed()) { return new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed()) {
@Override @Override
protected void writeValue(XContentBuilder b, long value) throws IOException { protected void writeValue(XContentBuilder b, long value) throws IOException {
b.value(DocValueFormat.UNSIGNED_LONG_SHIFTED.format(value)); b.value(DocValueFormat.UNSIGNED_LONG_SHIFTED.format(value));
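The writeValue override funnels raw doc values through DocValueFormat.UNSIGNED_LONG_SHIFTED because, as I understand it, unsigned longs are persisted shifted into signed space so that signed ordering matches unsigned ordering, and the format shifts back for display. A background sketch of that encoding (an assumption about the format's contract, not code from this commit):

    java.math.BigInteger maxU64 = java.math.BigInteger.ONE.shiftLeft(64).subtract(java.math.BigInteger.ONE);
    long stored = maxU64.subtract(java.math.BigInteger.ONE.shiftLeft(63)).longValueExact(); // 2^64-1 -> Long.MAX_VALUE
    java.math.BigInteger decoded = java.math.BigInteger.valueOf(stored).add(java.math.BigInteger.ONE.shiftLeft(63));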

View file

@ -454,10 +454,10 @@ public class VersionStringFieldMapper extends FieldMapper {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
if (copyTo.copyToFields().isEmpty() != true) { if (copyTo.copyToFields().isEmpty() != true) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to" "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
); );
} }
return new SortedSetDocValuesSyntheticFieldLoader(name(), leafName(), null, false) { return new SortedSetDocValuesSyntheticFieldLoader(fullPath(), leafName(), null, false) {
@Override @Override
protected BytesRef convert(BytesRef value) { protected BytesRef convert(BytesRef value) {
return VersionEncoder.decodeVersion(value); return VersionEncoder.decodeVersion(value);

View file

@ -476,7 +476,7 @@ public class GeoShapeWithDocValuesFieldMapper extends AbstractShapeGeometryField
protected void checkIncomingMergeType(FieldMapper mergeWith) { protected void checkIncomingMergeType(FieldMapper mergeWith) {
if (mergeWith instanceof GeoShapeWithDocValuesFieldMapper == false && CONTENT_TYPE.equals(mergeWith.typeName())) { if (mergeWith instanceof GeoShapeWithDocValuesFieldMapper == false && CONTENT_TYPE.equals(mergeWith.typeName())) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"mapper [" + name() + "] of type [geo_shape] cannot change strategy from [BKD] to [recursive]" "mapper [" + fullPath() + "] of type [geo_shape] cannot change strategy from [BKD] to [recursive]"
); );
} }
super.checkIncomingMergeType(mergeWith); super.checkIncomingMergeType(mergeWith);

View file

@ -949,7 +949,7 @@ public class WildcardFieldMapper extends FieldMapper {
if (value.length() <= ignoreAbove) { if (value.length() <= ignoreAbove) {
createFields(value, parseDoc, fields); createFields(value, parseDoc, fields);
} else { } else {
context.addIgnoredField(name()); context.addIgnoredField(fullPath());
if (storeIgnored) { if (storeIgnored) {
parseDoc.add(new StoredField(originalName(), new BytesRef(value))); parseDoc.add(new StoredField(originalName(), new BytesRef(value)));
} }
@ -959,7 +959,7 @@ public class WildcardFieldMapper extends FieldMapper {
} }
private String originalName() { private String originalName() {
return name() + "._original"; return fullPath() + "._original";
} }
void createFields(String value, LuceneDocument parseDoc, List<IndexableField> fields) { void createFields(String value, LuceneDocument parseDoc, List<IndexableField> fields) {
@ -1000,7 +1000,7 @@ public class WildcardFieldMapper extends FieldMapper {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
if (copyTo.copyToFields().isEmpty() != true) { if (copyTo.copyToFields().isEmpty() != true) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to" "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
); );
} }
return new WildcardSyntheticFieldLoader(); return new WildcardSyntheticFieldLoader();
@ -1023,7 +1023,7 @@ public class WildcardFieldMapper extends FieldMapper {
@Override @Override
public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException { public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException {
BinaryDocValues values = leafReader.getBinaryDocValues(name()); BinaryDocValues values = leafReader.getBinaryDocValues(fullPath());
if (values == null) { if (values == null) {
docValueCount = 0; docValueCount = 0;
return null; return null;
@ -1075,7 +1075,7 @@ public class WildcardFieldMapper extends FieldMapper {
@Override @Override
public String fieldName() { public String fieldName() {
return name(); return fullPath();
} }
} }
} }
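Two naming decisions surface in this mapper: values longer than ignore_above are recorded via addIgnoredField under the full path, and the stored copy kept for synthetic source hangs off a "._original" suffix on that same path. The branch, condensed from the hunk with the identifiers it uses:

    if (value.length() <= ignoreAbove) {
        createFields(value, parseDoc, fields);
    } else {
        context.addIgnoredField(fullPath());
        if (storeIgnored) {
            parseDoc.add(new StoredField(originalName(), new BytesRef(value))); // fullPath() + "._original"
        }
    }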

View file

@ -54,7 +54,7 @@ public class WildcardFieldAggregationTests extends AggregatorTestCase {
} }
private void indexDoc(LuceneDocument parseDoc, Document doc, RandomIndexWriter iw) throws IOException { private void indexDoc(LuceneDocument parseDoc, Document doc, RandomIndexWriter iw) throws IOException {
IndexableField field = parseDoc.getByKey(wildcardFieldMapper.name()); IndexableField field = parseDoc.getByKey(wildcardFieldMapper.fullPath());
if (field != null) { if (field != null) {
doc.add(field); doc.add(field);
} }

View file

@ -1136,7 +1136,7 @@ public class WildcardFieldMapperTests extends MapperTestCase {
} }
private void indexDoc(LuceneDocument parseDoc, Document doc, RandomIndexWriter iw) throws IOException { private void indexDoc(LuceneDocument parseDoc, Document doc, RandomIndexWriter iw) throws IOException {
IndexableField field = parseDoc.getByKey(wildcardFieldType.name()); IndexableField field = parseDoc.getByKey(wildcardFieldType.fullPath());
if (field != null) { if (field != null) {
doc.add(field); doc.add(field);
} }