Rename Mapper#name to Mapper#fullPath (#110040)
This addresses a long-standing TODO that caused quite a few bugs over time: the mapper name does not include its full path, while the MappedFieldType name does. We have renamed Mapper.Builder#name to leafName (#109971) and Mapper#simpleName to leafName (#110030). This commit renames Mapper#name to fullPath for clarity. This required some adjustments in FieldAliasMapper to avoid confusion between the existing path method and fullPath; I renamed path to targetPath for clarity. ObjectMapper already had a fullPath method that returned name and was effectively a copy of name, so it could be removed.
Parent: b0d9f95e53
Commit: 915e4a50c5

71 changed files with 354 additions and 312 deletions
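The gist of the rename, as a minimal illustrative sketch before the diff (the mapping below is hypothetical and not part of this commit):

    // Hypothetical example to illustrate the naming model; not from this diff.
    // Assume a keyword mapper mounted at "user.name" inside an object "user".
    Mapper mapper = ...; // the keyword mapper for "user.name"
    mapper.leafName(); // "name" - the last path segment, relative to its parent object
    mapper.fullPath(); // "user.name" - the canonical dotted path, matching MappedFieldType#name()
    // Before this change, Mapper#name() returned the full path despite its ambiguous name,
    // which is what the TODO removed in this commit complained about.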
@@ -217,7 +217,7 @@ public class DataStreamIndexSettingsProvider implements IndexSettingProvider {
     private static void extractPath(List<String> routingPaths, Mapper mapper) {
         if (mapper instanceof KeywordFieldMapper keywordFieldMapper) {
             if (keywordFieldMapper.fieldType().isDimension()) {
-                routingPaths.add(mapper.name());
+                routingPaths.add(mapper.fullPath());
             }
         }
     }
@@ -621,7 +621,7 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
     protected void checkIncomingMergeType(FieldMapper mergeWith) {
         if (mergeWith instanceof LegacyGeoShapeFieldMapper == false && CONTENT_TYPE.equals(mergeWith.typeName())) {
             throw new IllegalArgumentException(
-                "mapper [" + name() + "] of type [geo_shape] cannot change strategy from [recursive] to [BKD]"
+                "mapper [" + fullPath() + "] of type [geo_shape] cannot change strategy from [recursive] to [BKD]"
             );
         }
         super.checkIncomingMergeType(mergeWith);
@@ -444,7 +444,7 @@ public class MatchOnlyTextFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
         return new StringStoredFieldFieldLoader(fieldType().storedFieldNameForSyntheticSource(), leafName(), null) {
@@ -205,9 +205,9 @@ public class RankFeatureFieldMapper extends FieldMapper {
             value = context.parser().floatValue();
         }

-        if (context.doc().getByKey(name()) != null) {
+        if (context.doc().getByKey(fullPath()) != null) {
             throw new IllegalArgumentException(
-                "[rank_feature] fields do not support indexing multiple values for the same field [" + name() + "] in the same document"
+                "[rank_feature] fields do not support indexing multiple values for the same field [" + fullPath() + "] in the same document"
             );
         }

@@ -215,7 +215,7 @@ public class RankFeatureFieldMapper extends FieldMapper {
             value = 1 / value;
         }

-        context.doc().addWithKey(name(), new FeatureField(NAME, name(), value));
+        context.doc().addWithKey(fullPath(), new FeatureField(NAME, fullPath(), value));
     }

     private static Float objectToFloat(Object value) {
@@ -174,7 +174,7 @@ public class RankFeaturesFieldMapper extends FieldMapper {
             } else if (token == Token.VALUE_NULL) {
                 // ignore feature, this is consistent with numeric fields
             } else if (token == Token.VALUE_NUMBER || token == Token.VALUE_STRING) {
-                final String key = name() + "." + feature;
+                final String key = fullPath() + "." + feature;
                 float value = context.parser().floatValue(true);
                 if (context.doc().getByKey(key) != null) {
                     throw new IllegalArgumentException(
@@ -187,7 +187,7 @@ public class RankFeaturesFieldMapper extends FieldMapper {
                 if (positiveScoreImpact == false) {
                     value = 1 / value;
                 }
-                context.doc().addWithKey(key, new FeatureField(name(), feature, value));
+                context.doc().addWithKey(key, new FeatureField(fullPath(), feature, value));
             } else {
                 throw new IllegalArgumentException(
                     "[rank_features] fields take hashes that map a feature to a strictly positive "
@@ -531,7 +531,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
                 context.addIgnoredField(mappedFieldType.name());
                 if (isSourceSynthetic) {
                     // Save a copy of the field so synthetic source can load it
-                    context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+                    context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
                 }
                 return;
             } else {
@@ -559,7 +559,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
                 context.addIgnoredField(mappedFieldType.name());
                 if (isSourceSynthetic) {
                     // Save a copy of the field so synthetic source can load it
-                    context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+                    context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
                 }
                 return;
             } else {
@@ -721,15 +721,19 @@ public class ScaledFloatFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
-        return new SortedNumericDocValuesSyntheticFieldLoader(name(), leafName(), ignoreMalformed.value()) {
+        return new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value()) {
             @Override
             protected void writeValue(XContentBuilder b, long value) throws IOException {
                 b.value(decodeForSyntheticSource(value, scalingFactor));
@@ -163,7 +163,7 @@ public class TokenCountFieldMapper extends FieldMapper {
         if (value == null) {
             tokenCount = nullValue;
         } else {
-            tokenCount = countPositions(analyzer, name(), value, enablePositionIncrements);
+            tokenCount = countPositions(analyzer, fullPath(), value, enablePositionIncrements);
         }

         NumberFieldMapper.NumberType.INTEGER.addFields(context.doc(), fieldType().name(), tokenCount, index, hasDocValues, store);
@@ -690,7 +690,7 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase {
         assertSearchAsYouTypeFieldType(mapper, mapper.fieldType(), maxShingleSize, analyzerName, mapper.prefixField().fieldType());

         assertThat(mapper.prefixField(), notNullValue());
-        assertThat(mapper.prefixField().fieldType().parentField, equalTo(mapper.name()));
+        assertThat(mapper.prefixField().fieldType().parentField, equalTo(mapper.fullPath()));
         assertPrefixFieldType(mapper.prefixField(), mapper.indexAnalyzers(), maxShingleSize, analyzerName);

         for (int shingleSize = 2; shingleSize <= maxShingleSize; shingleSize++) {
@@ -709,7 +709,7 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase {
         assertThat(mapper.shingleFields().length, equalTo(numberOfShingleSubfields));

         final Set<String> fieldsUsingSourcePath = new HashSet<>();
-        mapper.sourcePathUsedBy().forEachRemaining(mapper1 -> fieldsUsingSourcePath.add(mapper1.name()));
+        mapper.sourcePathUsedBy().forEachRemaining(mapper1 -> fieldsUsingSourcePath.add(mapper1.fullPath()));
         int multiFields = 0;
         for (FieldMapper ignored : mapper.multiFields()) {
             multiFields++;
@@ -717,12 +717,12 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase {
         assertThat(fieldsUsingSourcePath.size(), equalTo(numberOfShingleSubfields + 1 + multiFields));

         final Set<String> expectedFieldsUsingSourcePath = new HashSet<>();
-        expectedFieldsUsingSourcePath.add(mapper.prefixField().name());
+        expectedFieldsUsingSourcePath.add(mapper.prefixField().fullPath());
         for (ShingleFieldMapper shingleFieldMapper : mapper.shingleFields()) {
-            expectedFieldsUsingSourcePath.add(shingleFieldMapper.name());
+            expectedFieldsUsingSourcePath.add(shingleFieldMapper.fullPath());
         }
         for (FieldMapper multiField : mapper.multiFields()) {
-            expectedFieldsUsingSourcePath.add(multiField.name());
+            expectedFieldsUsingSourcePath.add(multiField.fullPath());
         }
         assertThat(fieldsUsingSourcePath, equalTo(expectedFieldsUsingSourcePath));
     }
@@ -128,7 +128,7 @@ public final class ParentJoinFieldMapper extends FieldMapper {
             relations.get()
                 .stream()
                 .map(relation -> new ParentIdFieldMapper(leafName() + "#" + relation.parent(), eagerGlobalOrdinals.get()))
-                .forEach(mapper -> parentIdFields.put(mapper.name(), mapper));
+                .forEach(mapper -> parentIdFields.put(mapper.fullPath(), mapper));
             Joiner joiner = new Joiner(leafName(), relations.get());
             return new ParentJoinFieldMapper(
                 leafName(),
@@ -264,13 +264,17 @@ public final class ParentJoinFieldMapper extends FieldMapper {
                 } else if ("parent".equals(currentFieldName)) {
                     parent = context.parser().text();
                 } else {
-                    throw new IllegalArgumentException("unknown field name [" + currentFieldName + "] in join field [" + name() + "]");
+                    throw new IllegalArgumentException(
+                        "unknown field name [" + currentFieldName + "] in join field [" + fullPath() + "]"
+                    );
                 }
             } else if (token == XContentParser.Token.VALUE_NUMBER) {
                 if ("parent".equals(currentFieldName)) {
                     parent = context.parser().numberValue().toString();
                 } else {
-                    throw new IllegalArgumentException("unknown field name [" + currentFieldName + "] in join field [" + name() + "]");
+                    throw new IllegalArgumentException(
+                        "unknown field name [" + currentFieldName + "] in join field [" + fullPath() + "]"
+                    );
                 }
             }
         }
@@ -278,23 +282,23 @@ public final class ParentJoinFieldMapper extends FieldMapper {
             name = context.parser().text();
             parent = null;
         } else {
-            throw new IllegalStateException("[" + name() + "] expected START_OBJECT or VALUE_STRING but was: " + token);
+            throw new IllegalStateException("[" + fullPath() + "] expected START_OBJECT or VALUE_STRING but was: " + token);
         }

         if (name == null) {
-            throw new IllegalArgumentException("null join name in field [" + name() + "]");
+            throw new IllegalArgumentException("null join name in field [" + fullPath() + "]");
         }

         if (fieldType().joiner.knownRelation(name) == false) {
-            throw new IllegalArgumentException("unknown join name [" + name + "] for field [" + name() + "]");
+            throw new IllegalArgumentException("unknown join name [" + name + "] for field [" + fullPath() + "]");
         }
         if (fieldType().joiner.childTypeExists(name)) {
             // Index the document as a child
             if (parent == null) {
-                throw new IllegalArgumentException("[parent] is missing for join field [" + name() + "]");
+                throw new IllegalArgumentException("[parent] is missing for join field [" + fullPath() + "]");
             }
             if (context.routing() == null) {
-                throw new IllegalArgumentException("[routing] is missing for join field [" + name() + "]");
+                throw new IllegalArgumentException("[routing] is missing for join field [" + fullPath() + "]");
             }
             String fieldName = fieldType().joiner.parentJoinField(name);
             parentIdFields.get(fieldName).indexValue(context, parent);
@@ -430,13 +430,13 @@ public class ParentJoinFieldMapperTests extends MapperServiceTestCase {

         Iterator<Mapper> it = mapper.iterator();
         FieldMapper next = (FieldMapper) it.next();
-        assertThat(next.name(), equalTo("join_field#parent"));
+        assertThat(next.fullPath(), equalTo("join_field#parent"));
         assertTrue(next.fieldType().isSearchable());
         assertTrue(next.fieldType().isAggregatable());

         assertTrue(it.hasNext());
         next = (FieldMapper) it.next();
-        assertThat(next.name(), equalTo("join_field#child"));
+        assertThat(next.fullPath(), equalTo("join_field#child"));
         assertTrue(next.fieldType().isSearchable());
         assertTrue(next.fieldType().isAggregatable());

@@ -407,7 +407,7 @@ public class PercolatorFieldMapper extends FieldMapper {
     @Override
     public void parse(DocumentParserContext context) throws IOException {
         SearchExecutionContext executionContext = this.searchExecutionContext.get();
-        if (context.doc().getField(queryBuilderField.name()) != null) {
+        if (context.doc().getField(queryBuilderField.fullPath()) != null) {
             // If a percolator query has been defined in an array object then multiple percolator queries
             // could be provided. In order to prevent this we fail if we try to parse more than one query
             // for the current document.
@@ -493,27 +493,27 @@ public class PercolatorFieldMapper extends FieldMapper {
                 builder.append(new BytesRef(extraction.field()));
                 builder.append(FIELD_VALUE_SEPARATOR);
                 builder.append(extraction.bytes());
-                doc.add(new StringField(queryTermsField.name(), builder.toBytesRef(), Field.Store.NO));
+                doc.add(new StringField(queryTermsField.fullPath(), builder.toBytesRef(), Field.Store.NO));
             } else if (extraction.range != null) {
                 byte[] min = extraction.range.lowerPoint;
                 byte[] max = extraction.range.upperPoint;
-                doc.add(new BinaryRange(rangeFieldMapper.name(), encodeRange(extraction.range.fieldName, min, max)));
+                doc.add(new BinaryRange(rangeFieldMapper.fullPath(), encodeRange(extraction.range.fieldName, min, max)));
             }
         }

         if (result.matchAllDocs) {
-            doc.add(new StringField(extractionResultField.name(), EXTRACTION_FAILED, Field.Store.NO));
+            doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_FAILED, Field.Store.NO));
             if (result.verified) {
-                doc.add(new StringField(extractionResultField.name(), EXTRACTION_COMPLETE, Field.Store.NO));
+                doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_COMPLETE, Field.Store.NO));
             }
         } else if (result.verified) {
-            doc.add(new StringField(extractionResultField.name(), EXTRACTION_COMPLETE, Field.Store.NO));
+            doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_COMPLETE, Field.Store.NO));
         } else {
-            doc.add(new StringField(extractionResultField.name(), EXTRACTION_PARTIAL, Field.Store.NO));
+            doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_PARTIAL, Field.Store.NO));
         }

         context.addToFieldNames(fieldType().name());
-        doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch));
+        doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.fullPath(), result.minimumShouldMatch));
     }

     static SearchExecutionContext configureContext(SearchExecutionContext context, boolean mapUnmappedFieldsAsString) {
@@ -87,7 +87,7 @@ public class QueryBuilderStoreTests extends ESTestCase {
         when(searchExecutionContext.getWriteableRegistry()).thenReturn(writableRegistry());
         when(searchExecutionContext.getParserConfig()).thenReturn(parserConfig());
         when(searchExecutionContext.getForField(fieldMapper.fieldType(), fielddataOperation)).thenReturn(
-            new BytesBinaryIndexFieldData(fieldMapper.name(), CoreValuesSourceType.KEYWORD)
+            new BytesBinaryIndexFieldData(fieldMapper.fullPath(), CoreValuesSourceType.KEYWORD)
         );
         when(searchExecutionContext.getFieldType(Mockito.anyString())).thenAnswer(invocation -> {
             final String fieldName = (String) invocation.getArguments()[0];
@@ -526,7 +526,7 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
         }

         if (value.length() > ignoreAbove) {
-            context.addIgnoredField(name());
+            context.addIgnoredField(fullPath());
             return;
         }

@@ -580,11 +580,11 @@ public class AnnotatedTextFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
         if (fieldType.stored()) {
-            return new StringStoredFieldFieldLoader(name(), leafName(), null) {
+            return new StringStoredFieldFieldLoader(fullPath(), leafName(), null) {
                 @Override
                 protected void write(XContentBuilder b, Object value) throws IOException {
                     b.value((String) value);
@@ -602,7 +602,7 @@ public class AnnotatedTextFieldMapper extends FieldMapper {
                 Locale.ROOT,
                 "field [%s] of type [%s] doesn't support synthetic source unless it is stored or has a sub-field of"
                     + " type [keyword] with doc values or stored and without a normalizer",
-                name(),
+                fullPath(),
                 typeName()
             )
         );
@@ -116,7 +116,7 @@ public class Murmur3FieldMapper extends FieldMapper {
             final long hash = MurmurHash3.hash128(bytes.bytes, bytes.offset, bytes.length, 0, new MurmurHash3.Hash128()).h1;
             context.doc().add(new SortedNumericDocValuesField(fieldType().name(), hash));
             if (fieldType().isStored()) {
-                context.doc().add(new StoredField(name(), hash));
+                context.doc().add(new StoredField(fullPath(), hash));
             }
         }
     }
@@ -90,7 +90,7 @@ public class SizeFieldMapper extends MetadataFieldMapper {
             return;
         }
         final int value = context.sourceToParse().source().length();
-        NumberType.INTEGER.addFields(context.doc(), name(), value, true, true, true);
+        NumberType.INTEGER.addFields(context.doc(), fullPath(), value, true, true, true);
     }

     @Override
@@ -859,7 +859,7 @@ public class FieldCapabilitiesIT extends ESIntegTestCase {

         @Override
         public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
-            return new StringStoredFieldFieldLoader(name(), leafName(), null) {
+            return new StringStoredFieldFieldLoader(fullPath(), leafName(), null) {
                 @Override
                 protected void write(XContentBuilder b, Object value) throws IOException {
                     BytesRef ref = (BytesRef) value;
@@ -124,7 +124,7 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte
     private static final ParseField MAPPING = new ParseField("mapping");

     /**
-     * Returns the mappings as a map. Note that the returned map has a single key which is always the field's {@link Mapper#name}.
+     * Returns the mappings as a map. Note that the returned map has a single key which is always the field's {@link Mapper#fullPath}.
      */
     public Map<String, Object> sourceAsMap() {
         return XContentHelper.convertToMap(source, true, XContentType.JSON).v2();
@@ -158,12 +158,12 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
         for (String field : request.fields()) {
             if (Regex.isMatchAllPattern(field)) {
                 for (Mapper fieldMapper : mappingLookup.fieldMappers()) {
-                    addFieldMapper(fieldPredicate, fieldMapper.name(), fieldMapper, fieldMappings, request.includeDefaults());
+                    addFieldMapper(fieldPredicate, fieldMapper.fullPath(), fieldMapper, fieldMappings, request.includeDefaults());
                 }
             } else if (Regex.isSimpleMatchPattern(field)) {
                 for (Mapper fieldMapper : mappingLookup.fieldMappers()) {
-                    if (Regex.simpleMatch(field, fieldMapper.name())) {
-                        addFieldMapper(fieldPredicate, fieldMapper.name(), fieldMapper, fieldMappings, request.includeDefaults());
+                    if (Regex.simpleMatch(field, fieldMapper.fullPath())) {
+                        addFieldMapper(fieldPredicate, fieldMapper.fullPath(), fieldMapper, fieldMappings, request.includeDefaults());
                     }
                 }
             } else {
@@ -195,7 +195,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
                     includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS,
                     false
                 );
-                fieldMappings.put(field, new FieldMappingMetadata(fieldMapper.name(), bytes));
+                fieldMappings.put(field, new FieldMappingMetadata(fieldMapper.fullPath(), bytes));
             } catch (IOException e) {
                 throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e);
             }
@@ -315,7 +315,7 @@ public abstract class Engine implements Closeable {
         for (Mapper mapper : mappingLookup.fieldMappers()) {
             if (mapper instanceof FieldMapper fieldMapper) {
                 if (fieldMapper.fieldType() instanceof SparseVectorFieldMapper.SparseVectorFieldType) {
-                    mappers.put(fieldMapper.name(), fieldMapper);
+                    mappers.put(fieldMapper.fullPath(), fieldMapper);
                 }
             }
         }
@@ -209,16 +209,20 @@ public class BinaryFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }

-        return new BinaryDocValuesSyntheticFieldLoader(name()) {
+        return new BinaryDocValuesSyntheticFieldLoader(fullPath()) {
             @Override
             protected void writeValue(XContentBuilder b, BytesRef value) throws IOException {
                 var in = new ByteArrayStreamInput();
@@ -441,7 +441,7 @@ public class BooleanFieldMapper extends FieldMapper {
                 context.addIgnoredField(mappedFieldType.name());
                 if (storeMalformedFields) {
                     // Save a copy of the field so synthetic source can load it
-                    context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+                    context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
                 }
             } else {
                 throw e;
@@ -505,15 +505,19 @@ public class BooleanFieldMapper extends FieldMapper {
         }
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
-        return new SortedNumericDocValuesSyntheticFieldLoader(name(), leafName(), ignoreMalformed.value()) {
+        return new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value()) {
             @Override
             protected void writeValue(XContentBuilder b, long value) throws IOException {
                 b.value(value == 1);
@@ -929,7 +929,7 @@ public final class DateFieldMapper extends FieldMapper {
                 context.addIgnoredField(mappedFieldType.name());
                 if (isSourceSynthetic) {
                     // Save a copy of the field so synthetic source can load it
-                    context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+                    context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
                 }
                 return;
             } else {
@@ -989,15 +989,19 @@ public final class DateFieldMapper extends FieldMapper {
         }
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
            );
         }
-        return new SortedNumericDocValuesSyntheticFieldLoader(name(), leafName(), ignoreMalformed) {
+        return new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed) {
             @Override
             protected void writeValue(XContentBuilder b, long value) throws IOException {
                 b.value(fieldType().format(value, fieldType().dateTimeFormatter()));
@@ -50,7 +50,7 @@ public class DocumentMapper {
         MapperMetrics mapperMetrics
     ) {
         this.documentParser = documentParser;
-        this.type = mapping.getRoot().name();
+        this.type = mapping.getRoot().fullPath();
         this.mappingLookup = MappingLookup.fromMapping(mapping);
         this.mappingSource = source;
         this.mapperMetrics = mapperMetrics;
@@ -169,7 +169,7 @@ public final class DocumentParser {
         // the document reader, so to ensure that we don't run them multiple times we
         // guard them with an 'executed' boolean
         Map<String, Consumer<LeafReaderContext>> fieldScripts = new HashMap<>();
-        indexTimeScriptMappers.forEach(mapper -> fieldScripts.put(mapper.name(), new Consumer<>() {
+        indexTimeScriptMappers.forEach(mapper -> fieldScripts.put(mapper.fullPath(), new Consumer<>() {
             boolean executed = false;

             @Override
@@ -250,7 +250,7 @@ public final class DocumentParser {
             return null;
         }
         RootObjectMapper.Builder rootBuilder = context.updateRoot();
-        context.getDynamicMappers().forEach(mapper -> rootBuilder.addDynamic(mapper.name(), null, mapper, context));
+        context.getDynamicMappers().forEach(mapper -> rootBuilder.addDynamic(mapper.fullPath(), null, mapper, context));

         for (RuntimeField runtimeField : context.getDynamicRuntimeFields()) {
             rootBuilder.addRuntimeField(runtimeField);
@@ -294,7 +294,7 @@ public final class DocumentParser {
             Tuple<DocumentParserContext, XContentBuilder> tuple = XContentDataHelper.cloneSubContext(context);
             context.addIgnoredField(
                 new IgnoredSourceFieldMapper.NameValue(
-                    context.parent().name(),
+                    context.parent().fullPath(),
                     context.parent().fullPath().indexOf(context.parent().leafName()),
                     XContentDataHelper.encodeXContentBuilder(tuple.v2()),
                     context.doc()
@@ -327,7 +327,7 @@ public final class DocumentParser {
             throw new DocumentParsingException(
                 context.parser().getTokenLocation(),
                 "object mapping for ["
-                    + mapper.name()
+                    + mapper.fullPath()
                     + "] tried to parse field ["
                     + currentFieldName
                     + "] as object, but found a concrete value"
@@ -384,7 +384,7 @@ public final class DocumentParser {
             throw new DocumentParsingException(
                 context.parser().getTokenLocation(),
                 "object mapping for ["
-                    + mapper.name()
+                    + mapper.fullPath()
                     + "] tried to parse field ["
                     + context.parser().currentName()
                     + "] as object, but got EOF, has a concrete value been provided to it?"
@@ -440,7 +440,7 @@ public final class DocumentParser {
                 context.addIgnoredField(
                     IgnoredSourceFieldMapper.NameValue.fromContext(
                         context,
-                        fieldMapper.name(),
+                        fieldMapper.fullPath(),
                         XContentDataHelper.encodeXContentBuilder(contextWithSourceToStore.v2())
                     )
                 );
@@ -476,14 +476,14 @@ public final class DocumentParser {

     private static void throwOnUnrecognizedMapperType(Mapper mapper) {
         throw new IllegalStateException(
-            "The provided mapper [" + mapper.name() + "] has an unrecognized type [" + mapper.getClass().getSimpleName() + "]."
+            "The provided mapper [" + mapper.fullPath() + "] has an unrecognized type [" + mapper.getClass().getSimpleName() + "]."
         );
     }

     private static void throwOnCopyToOnFieldAlias(DocumentParserContext context, Mapper mapper) {
         throw new DocumentParsingException(
             context.parser().getTokenLocation(),
-            "Cannot " + (context.isWithinCopyTo() ? "copy" : "write") + " to a field alias [" + mapper.name() + "]."
+            "Cannot " + (context.isWithinCopyTo() ? "copy" : "write") + " to a field alias [" + mapper.fullPath() + "]."
         );
     }

@@ -491,7 +491,7 @@ public final class DocumentParser {
         throw new DocumentParsingException(
             context.parser().getTokenLocation(),
             "Cannot copy field ["
-                + mapper.name()
+                + mapper.fullPath()
                 + "] to fields "
                 + copyToFields
                 + ". Copy-to currently only works for value-type fields, not objects."
@@ -564,7 +564,7 @@ public final class DocumentParser {
                     "Tried to add nested object ["
                         + dynamicObjectMapper.leafName()
                         + "] to object ["
-                        + context.parent().name()
+                        + context.parent().fullPath()
                         + "] which does not support subobjects"
                 );
             }
@@ -594,7 +594,7 @@ public final class DocumentParser {
     private static void throwOnCreateDynamicNestedViaCopyTo(Mapper dynamicObjectMapper, DocumentParserContext context) {
         throw new DocumentParsingException(
             context.parser().getTokenLocation(),
-            "It is forbidden to create dynamic nested objects ([" + dynamicObjectMapper.name() + "]) through `copy_to`"
+            "It is forbidden to create dynamic nested objects ([" + dynamicObjectMapper.fullPath() + "]) through `copy_to`"
         );
     }

@@ -759,7 +759,7 @@ public final class DocumentParser {
             throw new DocumentParsingException(
                 context.parser().getTokenLocation(),
                 "object mapping for ["
-                    + context.parent().name()
+                    + context.parent().fullPath()
                     + "] with array for ["
                     + arrayFieldName
                     + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?"
@@ -782,7 +782,7 @@ public final class DocumentParser {
             throw new DocumentParsingException(
                 context.parser().getTokenLocation(),
                 "object mapping ["
-                    + context.parent().name()
+                    + context.parent().fullPath()
                     + "] trying to serialize a value with"
                     + " no field associated with it, current value ["
                     + context.parser().textOrNull()
@@ -937,7 +937,7 @@ public final class DocumentParser {
         }

         @Override
-        public String name() {
+        public String fullPath() {
             throw new UnsupportedOperationException();
         }

@@ -373,15 +373,15 @@ public abstract class DocumentParserContext {
     public final boolean addDynamicMapper(Mapper mapper) {
         // eagerly check object depth limit here to avoid stack overflow errors
         if (mapper instanceof ObjectMapper) {
-            MappingLookup.checkObjectDepthLimit(indexSettings().getMappingDepthLimit(), mapper.name());
+            MappingLookup.checkObjectDepthLimit(indexSettings().getMappingDepthLimit(), mapper.fullPath());
         }

         // eagerly check field name limit here to avoid OOM errors
         // only check fields that are not already mapped or tracked in order to avoid hitting field limit too early via double-counting
         // note that existing fields can also receive dynamic mapping updates (e.g. constant_keyword to fix the value)
-        if (mappingLookup.getMapper(mapper.name()) == null
-            && mappingLookup.objectMappers().containsKey(mapper.name()) == false
-            && dynamicMappers.containsKey(mapper.name()) == false) {
+        if (mappingLookup.getMapper(mapper.fullPath()) == null
+            && mappingLookup.objectMappers().containsKey(mapper.fullPath()) == false
+            && dynamicMappers.containsKey(mapper.fullPath()) == false) {
             int mapperSize = mapper.getTotalFieldsCount();
             int additionalFieldsToAdd = getNewFieldsSize() + mapperSize;
             if (indexSettings().isIgnoreDynamicFieldsBeyondLimit()) {
@@ -391,15 +391,15 @@ public abstract class DocumentParserContext {
                         addIgnoredField(
                             IgnoredSourceFieldMapper.NameValue.fromContext(
                                 this,
-                                mapper.name(),
+                                mapper.fullPath(),
                                 XContentDataHelper.encodeToken(parser())
                             )
                         );
                     } catch (IOException e) {
-                        throw new IllegalArgumentException("failed to parse field [" + mapper.name() + " ]", e);
+                        throw new IllegalArgumentException("failed to parse field [" + mapper.fullPath() + " ]", e);
                     }
                 }
-                addIgnoredField(mapper.name());
+                addIgnoredField(mapper.fullPath());
                 return false;
             }
         } else {
@@ -408,7 +408,7 @@ public abstract class DocumentParserContext {
             dynamicMappersSize.add(mapperSize);
         }
         if (mapper instanceof ObjectMapper objectMapper) {
-            dynamicObjectMappers.put(objectMapper.name(), objectMapper);
+            dynamicObjectMappers.put(objectMapper.fullPath(), objectMapper);
             // dynamic object mappers may have been obtained from applying a dynamic template, in which case their definition may contain
             // sub-fields as well as sub-objects that need to be added to the mappings
             for (Mapper submapper : objectMapper.mappers.values()) {
@@ -425,7 +425,7 @@ public abstract class DocumentParserContext {
         // dynamically mapped objects when the incoming document defines no sub-fields in them:
         // 1) by default, they would be empty containers in the mappings, is it then important to map them?
         // 2) they can be the result of applying a dynamic template which may define sub-fields or set dynamic, enabled or subobjects.
-        dynamicMappers.computeIfAbsent(mapper.name(), k -> new ArrayList<>()).add(mapper);
+        dynamicMappers.computeIfAbsent(mapper.fullPath(), k -> new ArrayList<>()).add(mapper);
         return true;
     }

@@ -32,16 +32,16 @@ public final class FieldAliasMapper extends Mapper {
     }

     private final String name;
-    private final String path;
+    private final String targetPath;

-    public FieldAliasMapper(String simpleName, String name, String path) {
+    public FieldAliasMapper(String simpleName, String name, String targetPath) {
         super(simpleName);
         this.name = Mapper.internFieldName(name);
-        this.path = path;
+        this.targetPath = targetPath;
     }

     @Override
-    public String name() {
+    public String fullPath() {
         return name;
     }

@@ -50,15 +50,15 @@ public final class FieldAliasMapper extends Mapper {
         return CONTENT_TYPE;
     }

-    public String path() {
-        return path;
+    public String targetPath() {
+        return targetPath;
     }

     @Override
     public Mapper merge(Mapper mergeWith, MapperMergeContext mapperMergeContext) {
         if ((mergeWith instanceof FieldAliasMapper) == false) {
             throw new IllegalArgumentException(
-                "Cannot merge a field alias mapping [" + name() + "] with a mapping that is not for a field alias."
+                "Cannot merge a field alias mapping [" + fullPath() + "] with a mapping that is not for a field alias."
             );
         }
         return mergeWith;
@@ -71,37 +71,37 @@ public final class FieldAliasMapper extends Mapper {

     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        return builder.startObject(leafName()).field("type", CONTENT_TYPE).field(Names.PATH, path).endObject();
+        return builder.startObject(leafName()).field("type", CONTENT_TYPE).field(Names.PATH, targetPath).endObject();
     }

     @Override
     public void validate(MappingLookup mappers) {
-        if (Objects.equals(this.path(), this.name())) {
+        if (Objects.equals(this.targetPath(), this.fullPath())) {
             throw new MapperParsingException(
-                "Invalid [path] value [" + path + "] for field alias [" + name() + "]: an alias cannot refer to itself."
+                "Invalid [path] value [" + targetPath + "] for field alias [" + fullPath() + "]: an alias cannot refer to itself."
             );
         }
-        if (mappers.fieldTypesLookup().get(path) == null) {
+        if (mappers.fieldTypesLookup().get(targetPath) == null) {
             throw new MapperParsingException(
                 "Invalid [path] value ["
-                    + path
+                    + targetPath
                     + "] for field alias ["
-                    + name()
+                    + fullPath()
                     + "]: an alias must refer to an existing field in the mappings."
             );
         }
-        if (mappers.getMapper(path) instanceof FieldAliasMapper) {
+        if (mappers.getMapper(targetPath) instanceof FieldAliasMapper) {
             throw new MapperParsingException(
-                "Invalid [path] value [" + path + "] for field alias [" + name() + "]: an alias cannot refer to another alias."
+                "Invalid [path] value [" + targetPath + "] for field alias [" + fullPath() + "]: an alias cannot refer to another alias."
             );
         }
         String aliasScope = mappers.nestedLookup().getNestedParent(name);
-        String pathScope = mappers.nestedLookup().getNestedParent(path);
+        String pathScope = mappers.nestedLookup().getNestedParent(targetPath);

         if (Objects.equals(aliasScope, pathScope) == false) {
             StringBuilder message = new StringBuilder(
                 "Invalid [path] value ["
-                    + path
+                    + targetPath
                     + "] for field alias ["
                     + name
                     + "]: an alias must have the same nested scope as its target. "
@@ -118,7 +118,7 @@ public abstract class FieldMapper extends Mapper {
     }

     @Override
-    public String name() {
+    public String fullPath() {
         return fieldType().name();
     }

@@ -277,9 +277,9 @@ public abstract class FieldMapper extends Mapper {
                 indexScriptValues(searchLookup, readerContext, doc, documentParserContext);
             } catch (Exception e) {
                 if (onScriptError == OnScriptError.CONTINUE) {
-                    documentParserContext.addIgnoredField(name());
+                    documentParserContext.addIgnoredField(fullPath());
                 } else {
-                    throw new DocumentParsingException(XContentLocation.UNKNOWN, "Error executing script on field [" + name() + "]", e);
+                    throw new DocumentParsingException(XContentLocation.UNKNOWN, "Error executing script on field [" + fullPath() + "]", e);
                 }
             }
         }
@@ -299,7 +299,7 @@ public abstract class FieldMapper extends Mapper {
         int doc,
         DocumentParserContext documentParserContext
     ) {
-        throw new UnsupportedOperationException("FieldMapper " + name() + " does not support [script]");
+        throw new UnsupportedOperationException("FieldMapper " + fullPath() + " does not support [script]");
     }

     @Override
@@ -321,11 +321,11 @@ public abstract class FieldMapper extends Mapper {
     @Override
     public final void validate(MappingLookup mappers) {
         if (this.copyTo() != null && this.copyTo().copyToFields().isEmpty() == false) {
-            if (mappers.isMultiField(this.name())) {
-                throw new IllegalArgumentException("[copy_to] may not be used to copy from a multi-field: [" + this.name() + "]");
+            if (mappers.isMultiField(this.fullPath())) {
+                throw new IllegalArgumentException("[copy_to] may not be used to copy from a multi-field: [" + this.fullPath() + "]");
             }

-            final String sourceScope = mappers.nestedLookup().getNestedParent(this.name());
+            final String sourceScope = mappers.nestedLookup().getNestedParent(this.fullPath());
             for (String copyTo : this.copyTo().copyToFields()) {
                 if (mappers.isMultiField(copyTo)) {
                     throw new IllegalArgumentException("[copy_to] may not be used to copy to a multi-field: [" + copyTo + "]");
@@ -381,7 +381,7 @@ public abstract class FieldMapper extends Mapper {
         if (mergeWith instanceof FieldMapper == false) {
             throw new IllegalArgumentException(
                 "mapper ["
-                    + name()
+                    + fullPath()
                     + "] cannot be changed from type ["
                     + contentType()
                     + "] to ["
@@ -395,7 +395,7 @@ public abstract class FieldMapper extends Mapper {
         if (builder == null) {
             return (FieldMapper) mergeWith;
         }
-        Conflicts conflicts = new Conflicts(name());
+        Conflicts conflicts = new Conflicts(fullPath());
         builder.merge((FieldMapper) mergeWith, conflicts, mapperMergeContext);
         conflicts.check();
         return builder.build(mapperMergeContext.getMapperBuilderContext());
@@ -404,12 +404,12 @@ public abstract class FieldMapper extends Mapper {
     protected void checkIncomingMergeType(FieldMapper mergeWith) {
         if (Objects.equals(this.getClass(), mergeWith.getClass()) == false) {
             throw new IllegalArgumentException(
-                "mapper [" + name() + "] cannot be changed from type [" + contentType() + "] to [" + mergeWith.contentType() + "]"
+                "mapper [" + fullPath() + "] cannot be changed from type [" + contentType() + "] to [" + mergeWith.contentType() + "]"
             );
         }
         if (Objects.equals(contentType(), mergeWith.contentType()) == false) {
             throw new IllegalArgumentException(
-                "mapper [" + name() + "] cannot be changed from type [" + contentType() + "] to [" + mergeWith.contentType() + "]"
+                "mapper [" + fullPath() + "] cannot be changed from type [" + contentType() + "] to [" + mergeWith.contentType() + "]"
             );
         }
     }
@@ -489,7 +489,7 @@ public abstract class FieldMapper extends Mapper {
         if (syntheticSourceMode() == SyntheticSourceMode.FALLBACK) {
             if (copyTo.copyToFields().isEmpty() != true) {
                 throw new IllegalArgumentException(
-                    "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                    "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
                 );
             }
             // Nothing because it is handled at `ObjectMapper` level.
@@ -574,7 +574,7 @@ public abstract class FieldMapper extends Mapper {
         private MultiFields(FieldMapper[] mappers) {
             this.mappers = mappers;
             // sort for consistent iteration order + serialization
-            Arrays.sort(this.mappers, Comparator.comparing(FieldMapper::name));
+            Arrays.sort(this.mappers, Comparator.comparing(FieldMapper::fullPath));
         }

         public void parse(FieldMapper mainField, DocumentParserContext context, Supplier<DocumentParserContext> multiFieldContextSupplier)
@@ -55,10 +55,10 @@ final class FieldTypeLookup {
         final Map<String, DynamicFieldType> dynamicFieldTypes = new HashMap<>();
         final Map<String, Set<String>> fieldToCopiedFields = new HashMap<>();
         for (FieldMapper fieldMapper : fieldMappers) {
-            String fieldName = fieldMapper.name();
+            String fieldName = fieldMapper.fullPath();
             MappedFieldType fieldType = fieldMapper.fieldType();
             fullNameToFieldType.put(fieldType.name(), fieldType);
-            fieldMapper.sourcePathUsedBy().forEachRemaining(mapper -> fullSubfieldNameToParentPath.put(mapper.name(), fieldName));
+            fieldMapper.sourcePathUsedBy().forEachRemaining(mapper -> fullSubfieldNameToParentPath.put(mapper.fullPath(), fieldName));
             if (fieldType instanceof DynamicFieldType) {
                 dynamicFieldTypes.put(fieldType.name(), (DynamicFieldType) fieldType);
             }
@@ -80,8 +80,8 @@ final class FieldTypeLookup {
         this.maxParentPathDots = maxParentPathDots;

         for (FieldAliasMapper fieldAliasMapper : fieldAliasMappers) {
-            String aliasName = fieldAliasMapper.name();
-            String path = fieldAliasMapper.path();
+            String aliasName = fieldAliasMapper.fullPath();
+            String path = fieldAliasMapper.targetPath();
             MappedFieldType fieldType = fullNameToFieldType.get(path);
             if (fieldType == null) {
                 continue;
@@ -612,7 +612,7 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<GeoPoi
         throws IOException {
         super.onMalformedValue(context, malformedDataForSyntheticSource, cause);
         if (malformedDataForSyntheticSource != null) {
-            context.doc().add(IgnoreMalformedStoredValues.storedField(name(), malformedDataForSyntheticSource));
+            context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), malformedDataForSyntheticSource));
         }
     }

@@ -628,15 +628,19 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<GeoPoi
         }
         if (fieldType().hasDocValues() == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
-        return new SortedNumericDocValuesSyntheticFieldLoader(name(), leafName(), ignoreMalformed()) {
+        return new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed()) {
             final GeoPoint point = new GeoPoint();

             @Override
@@ -539,7 +539,7 @@ public class IpFieldMapper extends FieldMapper {
                 context.addIgnoredField(fieldType().name());
                 if (storeIgnored) {
                     // Save a copy of the field so synthetic source can load it
-                    context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+                    context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
                 }
                 return;
             } else {
@@ -594,9 +594,9 @@ public class IpFieldMapper extends FieldMapper {

         @Override
         public void doValidate(MappingLookup lookup) {
-            if (dimension && null != lookup.nestedLookup().getNestedParent(name())) {
+            if (dimension && null != lookup.nestedLookup().getNestedParent(fullPath())) {
                 throw new IllegalArgumentException(
-                    TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]"
+                    TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + fullPath() + "]"
                 );
             }
         }
@@ -613,15 +613,19 @@ public class IpFieldMapper extends FieldMapper {
         }
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
-        return new SortedSetDocValuesSyntheticFieldLoader(name(), leafName(), null, ignoreMalformed) {
+        return new SortedSetDocValuesSyntheticFieldLoader(fullPath(), leafName(), null, ignoreMalformed) {
             @Override
             protected BytesRef convert(BytesRef value) {
                 byte[] bytes = Arrays.copyOfRange(value.bytes, value.offset, value.offset + value.length);
@@ -911,7 +911,7 @@ public final class KeywordFieldMapper extends FieldMapper {
         }

         if (value.length() > fieldType().ignoreAbove()) {
-            context.addIgnoredField(name());
+            context.addIgnoredField(fullPath());
             if (storeIgnored) {
                 // Save a copy of the field so synthetic source can load it
                 context.doc().add(new StoredField(originalName(), new BytesRef(value)));
@@ -919,7 +919,7 @@ public final class KeywordFieldMapper extends FieldMapper {
             return;
         }

-        value = normalizeValue(fieldType().normalizer(), name(), value);
+        value = normalizeValue(fieldType().normalizer(), fullPath(), value);

         // convert to utf8 only once before feeding postings/dv/stored fields
         final BytesRef binaryValue = new BytesRef(value);
@@ -1000,9 +1000,9 @@ public final class KeywordFieldMapper extends FieldMapper {

         @Override
         public void doValidate(MappingLookup lookup) {
-            if (fieldType().isDimension() && null != lookup.nestedLookup().getNestedParent(name())) {
+            if (fieldType().isDimension() && null != lookup.nestedLookup().getNestedParent(fullPath())) {
                 throw new IllegalArgumentException(
-                    TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]"
+                    TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + fullPath() + "]"
                 );
             }
         }
@@ -1017,7 +1017,7 @@ public final class KeywordFieldMapper extends FieldMapper {
      * for synthetic source.
      */
     private String originalName() {
-        return name() + "._original";
+        return fullPath() + "._original";
     }

     @Override
@@ -1036,17 +1036,17 @@ public final class KeywordFieldMapper extends FieldMapper {
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
         if (hasNormalizer()) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares a normalizer"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares a normalizer"
             );
         }
         if (fieldType.stored()) {
             return new StringStoredFieldFieldLoader(
-                name(),
+                fullPath(),
                 simpleName,
                 fieldType().ignoreAbove == Defaults.IGNORE_ABOVE ? null : originalName()
             ) {
@@ -1060,14 +1060,14 @@ public final class KeywordFieldMapper extends FieldMapper {
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
                 "field ["
-                    + name()
+                    + fullPath()
                     + "] of type ["
                     + typeName()
                     + "] doesn't support synthetic source because it doesn't have doc values and isn't stored"
             );
         }
         return new SortedSetDocValuesSyntheticFieldLoader(
-            name(),
+            fullPath(),
             simpleName,
             fieldType().ignoreAbove == Defaults.IGNORE_ABOVE ? null : originalName(),
             false
@@ -60,13 +60,17 @@ public abstract class Mapper implements ToXContentFragment, Iterable<Mapper> {
         this.leafName = internFieldName(leafName);
     }

+    /**
+     * Returns the name of the field.
+     * When the field has a parent object, its leaf name won't include the entire path.
+     * When subobjects are disabled, its leaf name will be the same as {@link #fullPath()} in practice, because its parent is the root.
+     */
     public final String leafName() {
         return leafName;
     }

     /** Returns the canonical name which uniquely identifies the mapper against other mappers in a type. */
-    // TODO rename this to fullPath???
-    public abstract String name();
+    public abstract String fullPath();

     /**
      * Returns a name representing the type of this mapper.
@@ -94,7 +98,7 @@ public abstract class Mapper implements ToXContentFragment, Iterable<Mapper> {
      * fields properly.
      */
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
-        throw new IllegalArgumentException("field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source");
+        throw new IllegalArgumentException("field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source");
     }

     @Override
@@ -52,11 +52,11 @@ public final class Mapping implements ToXContentFragment {
         for (int i = 0; i < metadataMappers.length; i++) {
             MetadataFieldMapper metadataMapper = metadataMappers[i];
             metadataMappersMap[i] = Map.entry(metadataMapper.getClass(), metadataMapper);
-            metadataMappersByName[i] = Map.entry(metadataMapper.name(), metadataMapper);
+            metadataMappersByName[i] = Map.entry(metadataMapper.fullPath(), metadataMapper);
         }
         this.root = rootObjectMapper;
         // keep root mappers sorted for consistent serialization
-        Arrays.sort(metadataMappers, Comparator.comparing(Mapper::name));
+        Arrays.sort(metadataMappers, Comparator.comparing(Mapper::fullPath));
         this.metadataMappersMap = Map.ofEntries(metadataMappersMap);
         this.metadataMappersByName = Map.ofEntries(metadataMappersByName);
         this.meta = meta;
@@ -70,7 +70,7 @@ public final class Mapping implements ToXContentFragment {
         try {
             return new CompressedXContent(this);
         } catch (Exception e) {
-            throw new ElasticsearchGenerationException("failed to serialize source for type [" + root.name() + "]", e);
+            throw new ElasticsearchGenerationException("failed to serialize source for type [" + root.fullPath() + "]", e);
         }
     }

@@ -163,27 +163,27 @@ public final class MappingLookup {
         final Set<String> completionFields = new HashSet<>();
         final List<FieldMapper> indexTimeScriptMappers = new ArrayList<>();
         for (FieldMapper mapper : mappers) {
-            if (objects.containsKey(mapper.name())) {
-                throw new MapperParsingException("Field [" + mapper.name() + "] is defined both as an object and a field");
+            if (objects.containsKey(mapper.fullPath())) {
+                throw new MapperParsingException("Field [" + mapper.fullPath() + "] is defined both as an object and a field");
             }
-            if (fieldMappers.put(mapper.name(), mapper) != null) {
-                throw new MapperParsingException("Field [" + mapper.name() + "] is defined more than once");
+            if (fieldMappers.put(mapper.fullPath(), mapper) != null) {
+                throw new MapperParsingException("Field [" + mapper.fullPath() + "] is defined more than once");
             }
             indexAnalyzersMap.putAll(mapper.indexAnalyzers());
             if (mapper.hasScript()) {
                 indexTimeScriptMappers.add(mapper);
             }
             if (mapper instanceof CompletionFieldMapper) {
-                completionFields.add(mapper.name());
+                completionFields.add(mapper.fullPath());
             }
         }

         for (FieldAliasMapper aliasMapper : aliasMappers) {
-            if (objects.containsKey(aliasMapper.name())) {
-                throw new MapperParsingException("Alias [" + aliasMapper.name() + "] is defined both as an object and an alias");
+            if (objects.containsKey(aliasMapper.fullPath())) {
+                throw new MapperParsingException("Alias [" + aliasMapper.fullPath() + "] is defined both as an object and an alias");
             }
-            if (fieldMappers.put(aliasMapper.name(), aliasMapper) != null) {
-                throw new MapperParsingException("Alias [" + aliasMapper.name() + "] is defined both as an alias and a concrete field");
+            if (fieldMappers.put(aliasMapper.fullPath(), aliasMapper) != null) {
+                throw new MapperParsingException("Alias [" + aliasMapper.fullPath() + "] is defined both as an alias and a concrete field");
             }
         }

@@ -194,7 +194,7 @@ public final class MappingLookup {
         Map<String, InferenceFieldMetadata> inferenceFields = new HashMap<>();
         for (FieldMapper mapper : mappers) {
             if (mapper instanceof InferenceFieldMapper inferenceFieldMapper) {
-                inferenceFields.put(mapper.name(), inferenceFieldMapper.getMetadata(fieldTypeLookup.sourcePaths(mapper.name())));
+                inferenceFields.put(mapper.fullPath(), inferenceFieldMapper.getMetadata(fieldTypeLookup.sourcePaths(mapper.fullPath())));
             }
         }
         this.inferenceFields = Map.copyOf(inferenceFields);
@@ -225,7 +225,7 @@ public final class MappingLookup {

     private static void assertNamesInterned(String name, Mapper mapper) {
         assert name == name.intern();
-        assert mapper.name() == mapper.name().intern();
+        assert mapper.fullPath() == mapper.fullPath().intern();
         assert mapper.leafName() == mapper.leafName().intern();
         if (mapper instanceof ObjectMapper) {
             ((ObjectMapper) mapper).mappers.forEach(MappingLookup::assertNamesInterned);
@@ -197,7 +197,7 @@ public abstract class MetadataFieldMapper extends FieldMapper {
protected void parseCreateField(DocumentParserContext context) throws IOException {
throw new DocumentParsingException(
context.parser().getTokenLocation(),
"Field [" + name() + "] is a metadata field and cannot be added inside a document. Use the index API request parameters."
"Field [" + fullPath() + "] is a metadata field and cannot be added inside a document. Use the index API request parameters."
);
}

@@ -82,20 +82,20 @@ public interface NestedLookup {
if (mappers == null || mappers.isEmpty()) {
return NestedLookup.EMPTY;
}
mappers = mappers.stream().sorted(Comparator.comparing(ObjectMapper::name)).toList();
mappers = mappers.stream().sorted(Comparator.comparing(ObjectMapper::fullPath)).toList();
Map<String, Query> parentFilters = new HashMap<>();
Map<String, NestedObjectMapper> mappersByName = new HashMap<>();
NestedObjectMapper previous = null;
for (NestedObjectMapper mapper : mappers) {
mappersByName.put(mapper.name(), mapper);
mappersByName.put(mapper.fullPath(), mapper);
if (previous != null) {
if (mapper.name().startsWith(previous.name() + ".")) {
parentFilters.put(previous.name(), previous.nestedTypeFilter());
if (mapper.fullPath().startsWith(previous.fullPath() + ".")) {
parentFilters.put(previous.fullPath(), previous.nestedTypeFilter());
}
}
previous = mapper;
}
List<String> nestedPathNames = mappers.stream().map(NestedObjectMapper::name).toList();
List<String> nestedPathNames = mappers.stream().map(NestedObjectMapper::fullPath).toList();

return new NestedLookup() {

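The NestedLookup change above keeps the same parent-detection trick, just keyed on fullPath(): once the nested mappers are sorted lexicographically by path, a mapper's nested parent is a preceding entry whose path plus "." prefixes its own. A standalone sketch of that idea (not Elasticsearch code; the example paths are made up):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class NestedParentSketch {
    public static void main(String[] args) {
        List<String> paths = new ArrayList<>(List.of("user.address", "user", "comments"));
        paths.sort(String::compareTo); // lexicographic order puts a parent right before its child
        Map<String, String> parentOf = new HashMap<>();
        String previous = null;
        for (String path : paths) {
            if (previous != null && path.startsWith(previous + ".")) {
                parentOf.put(path, previous); // previous is a nested parent of path
            }
            previous = path;
        }
        System.out.println(parentOf); // {user.address=user}
    }
}
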
@@ -305,7 +305,7 @@ public class NestedObjectMapper extends ObjectMapper {
@Override
public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) {
if ((mergeWith instanceof NestedObjectMapper) == false) {
MapperErrors.throwNestedMappingConflictError(mergeWith.name());
MapperErrors.throwNestedMappingConflictError(mergeWith.fullPath());
}
NestedObjectMapper mergeWithObject = (NestedObjectMapper) mergeWith;

@@ -481,7 +481,7 @@ public class NestedObjectMapper extends ObjectMapper {

@Override
public String fieldName() {
return name();
return NestedObjectMapper.this.fullPath();
}
}
}

@@ -1900,7 +1900,7 @@ public class NumberFieldMapper extends FieldMapper {
context.addIgnoredField(mappedFieldType.name());
if (storeMalformedFields) {
// Save a copy of the field so synthetic source can load it
context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
}
return;
} else {

@@ -1979,9 +1979,9 @@ public class NumberFieldMapper extends FieldMapper {

@Override
public void doValidate(MappingLookup lookup) {
if (dimension && null != lookup.nestedLookup().getNestedParent(name())) {
if (dimension && null != lookup.nestedLookup().getNestedParent(fullPath())) {
throw new IllegalArgumentException(
TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]"
TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + fullPath() + "]"
);
}
}

@@ -1998,15 +1998,19 @@ public class NumberFieldMapper extends FieldMapper {
}
if (hasDocValues == false) {
throw new IllegalArgumentException(
"field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
"field ["
+ fullPath()
+ "] of type ["
+ typeName()
+ "] doesn't support synthetic source because it doesn't have doc values"
);
}
if (copyTo.copyToFields().isEmpty() != true) {
throw new IllegalArgumentException(
"field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
"field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
);
}
return type.syntheticFieldLoader(name(), leafName(), ignoreMalformed.value());
return type.syntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value());
}

// For testing only:

@@ -433,7 +433,7 @@ public class ObjectMapper extends Mapper {
}

@Override
public String name() {
public String fullPath() {
return this.fullPath;
}

@@ -459,10 +459,6 @@ public class ObjectMapper extends Mapper {
return mappers.values().iterator();
}

public String fullPath() {
return this.fullPath;
}

public final Dynamic dynamic() {
return dynamic;
}

@@ -492,11 +488,11 @@ public class ObjectMapper extends Mapper {
@Override
public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) {
if (mergeWith instanceof ObjectMapper == false) {
MapperErrors.throwObjectMappingConflictError(mergeWith.name());
MapperErrors.throwObjectMappingConflictError(mergeWith.fullPath());
}
if (this instanceof NestedObjectMapper == false && mergeWith instanceof NestedObjectMapper) {
// TODO stop NestedObjectMapper extending ObjectMapper?
MapperErrors.throwNestedMappingConflictError(mergeWith.name());
MapperErrors.throwNestedMappingConflictError(mergeWith.fullPath());
}
var mergeResult = MergeResult.build(this, (ObjectMapper) mergeWith, parentMergeContext);
return new ObjectMapper(

@@ -524,7 +520,9 @@ public class ObjectMapper extends Mapper {
if (reason == MergeReason.INDEX_TEMPLATE) {
enabled = mergeWithObject.enabled;
} else if (existing.isEnabled() != mergeWithObject.isEnabled()) {
throw new MapperException("the [enabled] parameter can't be updated for the object mapping [" + existing.name() + "]");
throw new MapperException(
"the [enabled] parameter can't be updated for the object mapping [" + existing.fullPath() + "]"
);
} else {
enabled = existing.enabled;
}

@@ -537,7 +535,7 @@ public class ObjectMapper extends Mapper {
subObjects = mergeWithObject.subobjects;
} else if (existing.subobjects != mergeWithObject.subobjects) {
throw new MapperException(
"the [subobjects] parameter can't be updated for the object mapping [" + existing.name() + "]"
"the [subobjects] parameter can't be updated for the object mapping [" + existing.fullPath() + "]"
);
} else {
subObjects = existing.subobjects;

@@ -551,7 +549,7 @@ public class ObjectMapper extends Mapper {
trackArraySource = mergeWithObject.storeArraySource;
} else if (existing.storeArraySource != mergeWithObject.storeArraySource) {
throw new MapperException(
"the [store_array_source] parameter can't be updated for the object mapping [" + existing.name() + "]"
"the [store_array_source] parameter can't be updated for the object mapping [" + existing.fullPath() + "]"
);
} else {
trackArraySource = existing.storeArraySource;

@@ -606,9 +604,9 @@ public class ObjectMapper extends Mapper {
} else {
assert mergeIntoMapper instanceof FieldMapper || mergeIntoMapper instanceof FieldAliasMapper;
if (mergeWithMapper instanceof NestedObjectMapper) {
MapperErrors.throwNestedMappingConflictError(mergeWithMapper.name());
MapperErrors.throwNestedMappingConflictError(mergeWithMapper.fullPath());
} else if (mergeWithMapper instanceof ObjectMapper) {
MapperErrors.throwObjectMappingConflictError(mergeWithMapper.name());
MapperErrors.throwObjectMappingConflictError(mergeWithMapper.fullPath());
}

// If we're merging template mappings when creating an index, then a field definition always

@@ -736,7 +734,7 @@ public class ObjectMapper extends Mapper {
protected void serializeMappers(XContentBuilder builder, Params params) throws IOException {
// sort the mappers so we get consistent serialization format
Mapper[] sortedMappers = mappers.values().toArray(Mapper[]::new);
Arrays.sort(sortedMappers, Comparator.comparing(Mapper::name));
Arrays.sort(sortedMappers, Comparator.comparing(Mapper::fullPath));

int count = 0;
for (Mapper mapper : sortedMappers) {

@@ -757,7 +755,7 @@ public class ObjectMapper extends Mapper {
}

protected SourceLoader.SyntheticFieldLoader syntheticFieldLoader(Stream<Mapper> mappers, boolean isFragment) {
var fields = mappers.sorted(Comparator.comparing(Mapper::name))
var fields = mappers.sorted(Comparator.comparing(Mapper::fullPath))
.map(Mapper::syntheticFieldLoader)
.filter(l -> l != SourceLoader.SyntheticFieldLoader.NOTHING)
.toList();

@@ -887,7 +885,7 @@ public class ObjectMapper extends Mapper {
if (objectsWithIgnoredFields == null || objectsWithIgnoredFields.isEmpty()) {
return false;
}
ignoredValues = objectsWithIgnoredFields.remove(name());
ignoredValues = objectsWithIgnoredFields.remove(ObjectMapper.this.fullPath());
hasValue |= ignoredValues != null;
for (SourceLoader.SyntheticFieldLoader loader : fields) {
hasValue |= loader.setIgnoredValues(objectsWithIgnoredFields);

@@ -897,7 +895,7 @@ public class ObjectMapper extends Mapper {

@Override
public String fieldName() {
return name();
return ObjectMapper.this.fullPath();
}
}

@@ -142,7 +142,7 @@ public class PassThroughObjectMapper extends ObjectMapper {
@Override
public PassThroughObjectMapper merge(Mapper mergeWith, MapperMergeContext parentBuilderContext) {
if (mergeWith instanceof PassThroughObjectMapper == false) {
MapperErrors.throwObjectMappingConflictError(mergeWith.name());
MapperErrors.throwObjectMappingConflictError(mergeWith.fullPath());
}

PassThroughObjectMapper mergeWithObject = (PassThroughObjectMapper) mergeWith;

@@ -216,11 +216,11 @@ public class PassThroughObjectMapper extends ObjectMapper {
public static void checkForDuplicatePriorities(Collection<PassThroughObjectMapper> passThroughMappers) {
Map<Integer, String> seen = new HashMap<>();
for (PassThroughObjectMapper mapper : passThroughMappers) {
String conflict = seen.put(mapper.priority, mapper.name());
String conflict = seen.put(mapper.priority, mapper.fullPath());
if (conflict != null) {
throw new MapperException(
"Pass-through object ["
+ mapper.name()
+ mapper.fullPath()
+ "] has a conflicting param [priority="
+ mapper.priority
+ "] with object ["

@@ -390,7 +390,7 @@ public class RangeFieldMapper extends FieldMapper {
}

Range range = parseRange(parser);
context.doc().addAll(fieldType().rangeType.createFields(context, name(), range, index, hasDocValues, store));
context.doc().addAll(fieldType().rangeType.createFields(context, fullPath(), range, index, hasDocValues, store));

if (hasDocValues == false && (index || store)) {
context.addToFieldNames(fieldType().name());

@@ -406,7 +406,7 @@ public class RangeFieldMapper extends FieldMapper {
if (start != XContentParser.Token.START_OBJECT) {
throw new DocumentParsingException(
parser.getTokenLocation(),
"error parsing field [" + name() + "], expected an object but got " + parser.currentName()
"error parsing field [" + fullPath() + "], expected an object but got " + parser.currentName()
);
}

@@ -445,7 +445,7 @@ public class RangeFieldMapper extends FieldMapper {
} else {
throw new DocumentParsingException(
parser.getTokenLocation(),
"error parsing field [" + name() + "], with unknown parameter [" + fieldName + "]"
"error parsing field [" + fullPath() + "], with unknown parameter [" + fieldName + "]"
);
}
}

@@ -471,15 +471,19 @@ public class RangeFieldMapper extends FieldMapper {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
if (hasDocValues == false) {
throw new IllegalArgumentException(
"field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
"field ["
+ fullPath()
+ "] of type ["
+ typeName()
+ "] doesn't support synthetic source because it doesn't have doc values"
);
}
if (copyTo.copyToFields().isEmpty() != true) {
throw new IllegalArgumentException(
"field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
"field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
);
}
return new BinaryDocValuesSyntheticFieldLoader(name()) {
return new BinaryDocValuesSyntheticFieldLoader(fullPath()) {
@Override
protected void writeValue(XContentBuilder b, BytesRef value) throws IOException {
List<Range> ranges = type.decodeRanges(value);

@@ -152,7 +152,7 @@ public class RootObjectMapper extends ObjectMapper {

@Override
public RootObjectMapper.Builder newBuilder(IndexVersion indexVersionCreated) {
RootObjectMapper.Builder builder = new RootObjectMapper.Builder(name(), subobjects);
RootObjectMapper.Builder builder = new RootObjectMapper.Builder(this.fullPath(), subobjects);
builder.enabled = enabled;
builder.dynamic = dynamic;
return builder;

@@ -220,7 +220,7 @@ public class RootObjectMapper extends ObjectMapper {
@Override
public RootObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) {
if (mergeWith instanceof RootObjectMapper == false) {
MapperErrors.throwObjectMappingConflictError(mergeWith.name());
MapperErrors.throwObjectMappingConflictError(mergeWith.fullPath());
}

RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith;

@@ -476,8 +476,8 @@ public final class TextFieldMapper extends FieldMapper {
SubFieldInfo phraseFieldInfo = buildPhraseInfo(fieldType, tft);
SubFieldInfo prefixFieldInfo = buildPrefixInfo(context, fieldType, tft);
for (Mapper mapper : multiFields) {
if (mapper.name().endsWith(FAST_PHRASE_SUFFIX) || mapper.name().endsWith(FAST_PREFIX_SUFFIX)) {
throw new MapperParsingException("Cannot use reserved field name [" + mapper.name() + "]");
if (mapper.fullPath().endsWith(FAST_PHRASE_SUFFIX) || mapper.fullPath().endsWith(FAST_PREFIX_SUFFIX)) {
throw new MapperParsingException("Cannot use reserved field name [" + mapper.fullPath() + "]");
}
}
return new TextFieldMapper(leafName(), fieldType, tft, prefixFieldInfo, phraseFieldInfo, multiFields, copyTo, this);

@@ -1223,7 +1223,7 @@ public final class TextFieldMapper extends FieldMapper {
assert mappedFieldType.getTextSearchInfo().isTokenized();
assert mappedFieldType.hasDocValues() == false;
if (fieldType.indexOptions() == IndexOptions.NONE && fieldType().fielddata()) {
throw new IllegalArgumentException("Cannot enable fielddata on a [text] field that is not indexed: [" + name() + "]");
throw new IllegalArgumentException("Cannot enable fielddata on a [text] field that is not indexed: [" + fullPath() + "]");
}
this.fieldType = freezeAndDeduplicateFieldType(fieldType);
this.prefixFieldInfo = prefixFieldInfo;

@@ -1247,7 +1247,7 @@ public final class TextFieldMapper extends FieldMapper {
@Override
public Map<String, NamedAnalyzer> indexAnalyzers() {
Map<String, NamedAnalyzer> analyzersMap = new HashMap<>();
analyzersMap.put(name(), indexAnalyzer);
analyzersMap.put(fullPath(), indexAnalyzer);
if (phraseFieldInfo != null) {
analyzersMap.put(
phraseFieldInfo.field,

@@ -1455,11 +1455,11 @@ public final class TextFieldMapper extends FieldMapper {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
if (copyTo.copyToFields().isEmpty() != true) {
throw new IllegalArgumentException(
"field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
"field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
);
}
if (store) {
return new StringStoredFieldFieldLoader(name(), leafName(), null) {
return new StringStoredFieldFieldLoader(fullPath(), leafName(), null) {
@Override
protected void write(XContentBuilder b, Object value) throws IOException {
b.value((String) value);

@@ -1477,7 +1477,7 @@ public final class TextFieldMapper extends FieldMapper {
Locale.ROOT,
"field [%s] of type [%s] doesn't support synthetic source unless it is stored or has a sub-field of"
+ " type [keyword] with doc values or stored and without a normalizer",
name(),
fullPath(),
typeName()
)
);

@@ -815,11 +815,11 @@ public final class FlattenedFieldMapper extends FieldMapper {
return SourceLoader.SyntheticFieldLoader.NOTHING;
}
if (fieldType().hasDocValues()) {
return new FlattenedSortedSetDocValuesSyntheticFieldLoader(name() + "._keyed", leafName());
return new FlattenedSortedSetDocValuesSyntheticFieldLoader(fullPath() + "._keyed", leafName());
}

throw new IllegalArgumentException(
"field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
"field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
);
}
}

@@ -1641,7 +1641,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
if (context.doc().getByKey(fieldType().name()) != null) {
throw new IllegalArgumentException(
"Field ["
+ name()
+ fullPath()
+ "] of type ["
+ typeName()
+ "] doesn't support indexing multiple values for the same field in the same document"

@@ -1715,7 +1715,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
"The ["
+ typeName()
+ "] field ["
+ name()
+ fullPath()
+ "] in doc ["
+ context.documentDescription()
+ "] has more dimensions "

@@ -1732,7 +1732,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
"The ["
+ typeName()
+ "] field ["
+ name()
+ fullPath()
+ "] in doc ["
+ context.documentDescription()
+ "] has a different number of dimensions "

@@ -1817,7 +1817,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
if (copyTo.copyToFields().isEmpty() != true) {
throw new IllegalArgumentException(
"field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
"field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
);
}
if (fieldType().indexed) {

@@ -1848,10 +1848,10 @@ public class DenseVectorFieldMapper extends FieldMapper {

@Override
public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException {
values = leafReader.getFloatVectorValues(name());
values = leafReader.getFloatVectorValues(fullPath());
if (values != null) {
if (indexCreatedVersion.onOrAfter(NORMALIZE_COSINE) && VectorSimilarity.COSINE.equals(vectorSimilarity)) {
magnitudeReader = leafReader.getNumericDocValues(name() + COSINE_MAGNITUDE_FIELD_SUFFIX);
magnitudeReader = leafReader.getNumericDocValues(fullPath() + COSINE_MAGNITUDE_FIELD_SUFFIX);
}
return docId -> {
hasValue = docId == values.advance(docId);

@@ -1859,7 +1859,7 @@ public class DenseVectorFieldMapper extends FieldMapper {
return hasValue;
};
}
byteVectorValues = leafReader.getByteVectorValues(name());
byteVectorValues = leafReader.getByteVectorValues(fullPath());
if (byteVectorValues != null) {
return docId -> {
hasValue = docId == byteVectorValues.advance(docId);

@@ -1903,7 +1903,7 @@ public class DenseVectorFieldMapper extends FieldMapper {

@Override
public String fieldName() {
return name();
return fullPath();
}
}

@@ -1923,7 +1923,7 @@ public class DenseVectorFieldMapper extends FieldMapper {

@Override
public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException {
values = leafReader.getBinaryDocValues(name());
values = leafReader.getBinaryDocValues(fullPath());
if (values == null) {
return null;
}

@@ -1958,7 +1958,7 @@ public class DenseVectorFieldMapper extends FieldMapper {

@Override
public String fieldName() {
return name();
return fullPath();
}
}
}

@@ -186,14 +186,14 @@ public class SparseVectorFieldMapper extends FieldMapper {
} else if (token == Token.VALUE_NULL) {
// ignore feature, this is consistent with numeric fields
} else if (token == Token.VALUE_NUMBER || token == Token.VALUE_STRING) {
final String key = name() + "." + feature;
final String key = fullPath() + "." + feature;
float value = context.parser().floatValue(true);

// if we have an existing feature of the same name we'll select for the one with the max value
// based on recommendations from this paper: https://arxiv.org/pdf/2305.18494.pdf
IndexableField currentField = context.doc().getByKey(key);
if (currentField == null) {
context.doc().addWithKey(key, new FeatureField(name(), feature, value));
context.doc().addWithKey(key, new FeatureField(fullPath(), feature, value));
} else if (currentField instanceof FeatureField && ((FeatureField) currentField).getFeatureValue() < value) {
((FeatureField) currentField).setFeatureValue(value);
}

|
|||
// to the routing path.
|
||||
Set<String> matchingRoutingPaths = new TreeSet<>(routingPaths);
|
||||
for (Mapper mapper : indexService.mapperService().mappingLookup().fieldMappers()) {
|
||||
if (mapper instanceof KeywordFieldMapper && indexRouting.matchesField(mapper.name())) {
|
||||
matchingRoutingPaths.add(mapper.name());
|
||||
if (mapper instanceof KeywordFieldMapper && indexRouting.matchesField(mapper.fullPath())) {
|
||||
matchingRoutingPaths.add(mapper.fullPath());
|
||||
}
|
||||
}
|
||||
routingPaths = new ArrayList<>(matchingRoutingPaths);
|
||||
|
|
|
@@ -248,7 +248,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
Mapper fieldMapper = defaultMapper.mappers().getMapper("field");

ParsedDocument parsedDocument = defaultMapper.parse(source(b -> b.field("field", "suggestion")));
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertFieldsOfType(fields);
}

@@ -495,7 +495,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {

ParsedDocument parsedDocument = defaultMapper.parse(source(b -> b.array("field", "suggestion1", "suggestion2")));

List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertThat(fields, containsInAnyOrder(suggestField("suggestion1"), suggestField("suggestion2")));
}

@@ -512,7 +512,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
b.endObject();
}));

List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertThat(fields, containsInAnyOrder(suggestField("suggestion")));
}

@@ -529,7 +529,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
b.endObject();
}));

List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertThat(fields, containsInAnyOrder(suggestField("suggestion1"), suggestField("suggestion2"), suggestField("suggestion3")));
}

@@ -566,7 +566,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
b.endObject();
}));

List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertFieldsOfType(fields);
}

@@ -584,7 +584,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
b.endArray();
}));

List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertThat(fields, containsInAnyOrder(suggestField("suggestion1"), suggestField("suggestion2"), suggestField("suggestion3")));
}

@@ -617,7 +617,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
b.endArray();
}));

List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertThat(
fields,
containsInAnyOrder(

@@ -1568,7 +1568,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
b.endObject();
})).rootDoc();

assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay"));
assertThat(doc.get(docMapper.mappers().getMapper("name.first").fullPath()), equalTo("shay"));
}

public void testParseToJsonAndParse() throws Exception {

@@ -1581,7 +1581,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
LuceneDocument doc = builtDocMapper.parse(new SourceToParse("1", json, XContentType.JSON)).rootDoc();
assertThat(doc.getBinaryValue(IdFieldMapper.NAME), equalTo(Uid.encodeId("1")));
assertThat(doc.get(builtDocMapper.mappers().getMapper("name.first").name()), equalTo("shay"));
assertThat(doc.get(builtDocMapper.mappers().getMapper("name.first").fullPath()), equalTo("shay"));
}

public void testSimpleParser() throws Exception {

@@ -1593,7 +1593,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
LuceneDocument doc = docMapper.parse(new SourceToParse("1", json, XContentType.JSON)).rootDoc();
assertThat(doc.getBinaryValue(IdFieldMapper.NAME), equalTo(Uid.encodeId("1")));
assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay"));
assertThat(doc.get(docMapper.mappers().getMapper("name.first").fullPath()), equalTo("shay"));
}

public void testSimpleParserNoTypeNoId() throws Exception {

@@ -1602,7 +1602,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json"));
LuceneDocument doc = docMapper.parse(new SourceToParse("1", json, XContentType.JSON)).rootDoc();
assertThat(doc.getBinaryValue(IdFieldMapper.NAME), equalTo(Uid.encodeId("1")));
assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay"));
assertThat(doc.get(docMapper.mappers().getMapper("name.first").fullPath()), equalTo("shay"));
}

public void testAttributes() throws Exception {

@@ -2605,7 +2605,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
assertTrue(barMapper instanceof ObjectMapper);
Mapper baz = ((ObjectMapper) barMapper).getMapper("baz");
assertNotNull(baz);
assertEquals("foo.bar.baz", baz.name());
assertEquals("foo.bar.baz", baz.fullPath());
assertEquals("baz", baz.leafName());
List<IndexableField> fields = doc.rootDoc().getFields("foo.bar.baz");
assertEquals(2, fields.size());

@@ -3245,7 +3245,7 @@ public class DocumentParserTests extends MapperServiceTestCase {

@Override
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
return new StringStoredFieldFieldLoader(name(), leafName(), null) {
return new StringStoredFieldFieldLoader(fullPath(), leafName(), null) {
@Override
protected void write(XContentBuilder b, Object value) throws IOException {
BytesRef ref = (BytesRef) value;

@@ -59,7 +59,7 @@ public class DynamicFieldsBuilderTests extends ESTestCase {
DynamicFieldsBuilder.DYNAMIC_TRUE.createDynamicFieldFromValue(ctx, fieldname);
List<Mapper> dynamicMappers = ctx.getDynamicMappers();
assertEquals(1, dynamicMappers.size());
assertEquals(fieldname, dynamicMappers.get(0).name());
assertEquals(fieldname, dynamicMappers.get(0).fullPath());
assertEquals(expectedType, dynamicMappers.get(0).typeName());
}

@@ -90,7 +90,7 @@ public class DynamicFieldsBuilderTests extends ESTestCase {
DynamicFieldsBuilder.DYNAMIC_TRUE.createDynamicFieldFromValue(ctx, "f1");
List<Mapper> dynamicMappers = ctx.getDynamicMappers();
assertEquals(1, dynamicMappers.size());
assertEquals("labels.f1", dynamicMappers.get(0).name());
assertEquals("labels.f1", dynamicMappers.get(0).fullPath());
assertEquals("keyword", dynamicMappers.get(0).typeName());
}
}

@@ -1473,7 +1473,7 @@ public class MapperServiceTests extends MapperServiceTestCase {
assertNotNull(mapper.mappers().getMapper("text_field"));
FieldMapper.MultiFields multiFields = ((TextFieldMapper) mapper.mappers().getMapper("text_field")).multiFields();
Map<String, FieldMapper> multiFieldMap = StreamSupport.stream(multiFields.spliterator(), false)
.collect(Collectors.toMap(FieldMapper::name, Function.identity()));
.collect(Collectors.toMap(FieldMapper::fullPath, Function.identity()));
assertThat(multiFieldMap.keySet(), contains("text_field.multi_field1"));
assertTrue(multiFieldMap.get("text_field.multi_field1").ignoreMalformed());
}

@@ -101,7 +101,7 @@ public class MappingLookupInferenceFieldMapperTests extends MapperServiceTestCas

@Override
public InferenceFieldMetadata getMetadata(Set<String> sourcePaths) {
return new InferenceFieldMetadata(name(), INFERENCE_ID, sourcePaths.toArray(new String[0]));
return new InferenceFieldMetadata(fullPath(), INFERENCE_ID, sourcePaths.toArray(new String[0]));
}

@Override

@@ -207,25 +207,25 @@ public class MappingParserTests extends MapperServiceTestCase {
Mapper object = mapping.getRoot().getMapper("obj");
assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class));
assertEquals("obj", object.leafName());
assertEquals("obj", object.name());
assertEquals("obj", object.fullPath());
ObjectMapper objectMapper = (ObjectMapper) object;
assertEquals(1, objectMapper.mappers.size());
object = objectMapper.getMapper("source");
assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class));
assertEquals("source", object.leafName());
assertEquals("obj.source", object.name());
assertEquals("obj.source", object.fullPath());
objectMapper = (ObjectMapper) object;
assertEquals(1, objectMapper.mappers.size());
object = objectMapper.getMapper("geo");
assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class));
assertEquals("geo", object.leafName());
assertEquals("obj.source.geo", object.name());
assertEquals("obj.source.geo", object.fullPath());
objectMapper = (ObjectMapper) object;
assertEquals(1, objectMapper.mappers.size());
Mapper location = objectMapper.getMapper("location");
assertThat(location, CoreMatchers.instanceOf(GeoPointFieldMapper.class));
GeoPointFieldMapper geoPointFieldMapper = (GeoPointFieldMapper) location;
assertEquals("obj.source.geo.location", geoPointFieldMapper.name());
assertEquals("obj.source.geo.location", geoPointFieldMapper.fullPath());
assertEquals("location", geoPointFieldMapper.leafName());
assertEquals("obj.source.geo.location", geoPointFieldMapper.mappedFieldType.name());
}

@@ -222,7 +222,7 @@ public class MultiFieldTests extends MapperServiceTestCase {
Mapper mapper = mapperService.mappingLookup().getMapper("field");
assertThat(mapper, instanceOf(FieldMapper.class));
final Set<String> fieldsUsingSourcePath = new HashSet<>();
((FieldMapper) mapper).sourcePathUsedBy().forEachRemaining(mapper1 -> fieldsUsingSourcePath.add(mapper1.name()));
((FieldMapper) mapper).sourcePathUsedBy().forEachRemaining(mapper1 -> fieldsUsingSourcePath.add(mapper1.fullPath()));
assertThat(fieldsUsingSourcePath, equalTo(Set.of("field.subfield1", "field.subfield2")));

assertThat(mapperService.mappingLookup().sourcePaths("field.subfield1"), equalTo(Set.of("field")));

@@ -54,10 +54,10 @@ public final class ObjectMapperMergeTests extends ESTestCase {
{
Mapper bar = mergedFoo.getMapper("bar");
assertEquals("bar", bar.leafName());
assertEquals("foo.bar", bar.name());
assertEquals("foo.bar", bar.fullPath());
Mapper baz = mergedFoo.getMapper("baz");
assertEquals("baz", baz.leafName());
assertEquals("foo.baz", baz.name());
assertEquals("foo.baz", baz.fullPath());
}
}

@@ -139,7 +139,7 @@ public final class ObjectMapperMergeTests extends ESTestCase {
final ObjectMapper merged = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, MAPPING_UPDATE, Long.MAX_VALUE));

final KeywordFieldMapper keywordFieldMapper = (KeywordFieldMapper) merged.getMapper("host.name");
assertEquals("host.name", keywordFieldMapper.name());
assertEquals("host.name", keywordFieldMapper.fullPath());
assertEquals("host.name", keywordFieldMapper.leafName());
}

@@ -156,7 +156,7 @@ public final class ObjectMapperMergeTests extends ESTestCase {
ObjectMapper foo = (ObjectMapper) merged.getMapper("foo");
ObjectMapper metrics = (ObjectMapper) foo.getMapper("metrics");
final KeywordFieldMapper keywordFieldMapper = (KeywordFieldMapper) metrics.getMapper("host.name");
assertEquals("foo.metrics.host.name", keywordFieldMapper.name());
assertEquals("foo.metrics.host.name", keywordFieldMapper.fullPath());
assertEquals("host.name", keywordFieldMapper.leafName());
}

@@ -169,10 +169,10 @@ public final class ObjectMapperMergeTests extends ESTestCase {
final ObjectMapper merged = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, MAPPING_UPDATE, Long.MAX_VALUE));

TextFieldMapper text = (TextFieldMapper) merged.getMapper("text");
assertEquals("text", text.name());
assertEquals("text", text.fullPath());
assertEquals("text", text.leafName());
KeywordFieldMapper keyword = (KeywordFieldMapper) text.multiFields().iterator().next();
assertEquals("text.keyword", keyword.name());
assertEquals("text.keyword", keyword.fullPath());
assertEquals("keyword", keyword.leafName());
}

@@ -189,10 +189,10 @@ public final class ObjectMapperMergeTests extends ESTestCase {
ObjectMapper foo = (ObjectMapper) merged.getMapper("foo");
ObjectMapper metrics = (ObjectMapper) foo.getMapper("metrics");
final TextFieldMapper textFieldMapper = (TextFieldMapper) metrics.getMapper("host.name");
assertEquals("foo.metrics.host.name", textFieldMapper.name());
assertEquals("foo.metrics.host.name", textFieldMapper.fullPath());
assertEquals("host.name", textFieldMapper.leafName());
FieldMapper fieldMapper = textFieldMapper.multiFields.iterator().next();
assertEquals("foo.metrics.host.name.keyword", fieldMapper.name());
assertEquals("foo.metrics.host.name.keyword", fieldMapper.fullPath());
assertEquals("keyword", fieldMapper.leafName());
}

@@ -325,7 +325,7 @@ public final class ObjectMapperMergeTests extends ESTestCase {
FieldMapper.Builder fieldBuilder = new KeywordFieldMapper.Builder("host.name", IndexVersion.current());
FieldMapper fieldMapper = fieldBuilder.build(MapperBuilderContext.root(false, false));
assertEquals("host.name", fieldMapper.leafName());
assertEquals("host.name", fieldMapper.name());
assertEquals("host.name", fieldMapper.fullPath());
return new RootObjectMapper.Builder("_doc", Explicit.EXPLICIT_FALSE).add(fieldBuilder)
.build(MapperBuilderContext.root(false, false));
}

@@ -343,7 +343,7 @@ public final class ObjectMapperMergeTests extends ESTestCase {
)
);
assertEquals("host.name", fieldMapper.leafName());
assertEquals("foo.metrics.host.name", fieldMapper.name());
assertEquals("foo.metrics.host.name", fieldMapper.fullPath());
return new ObjectMapper.Builder("foo", ObjectMapper.Defaults.SUBOBJECTS).add(
new ObjectMapper.Builder("metrics", Explicit.EXPLICIT_FALSE).add(fieldBuilder)
);

@@ -362,10 +362,10 @@ public final class ObjectMapperMergeTests extends ESTestCase {
)
);
assertEquals("host.name", textKeywordMultiField.leafName());
assertEquals("foo.metrics.host.name", textKeywordMultiField.name());
assertEquals("foo.metrics.host.name", textKeywordMultiField.fullPath());
FieldMapper fieldMapper = textKeywordMultiField.multiFields.iterator().next();
assertEquals("keyword", fieldMapper.leafName());
assertEquals("foo.metrics.host.name.keyword", fieldMapper.name());
assertEquals("foo.metrics.host.name.keyword", fieldMapper.fullPath());
return new ObjectMapper.Builder("foo", ObjectMapper.Defaults.SUBOBJECTS).add(
new ObjectMapper.Builder("metrics", Explicit.EXPLICIT_FALSE).add(fieldBuilder)
);

@@ -607,7 +607,7 @@ public class ObjectMapperTests extends MapperServiceTestCase {
new KeywordFieldMapper.Builder("keyword2", IndexVersion.current())
)
).add(new KeywordFieldMapper.Builder("keyword1", IndexVersion.current())).build(rootContext);
List<String> fields = objectMapper.asFlattenedFieldMappers(rootContext).stream().map(FieldMapper::name).toList();
List<String> fields = objectMapper.asFlattenedFieldMappers(rootContext).stream().map(FieldMapper::fullPath).toList();
assertThat(fields, containsInAnyOrder("parent.keyword1", "parent.child.keyword2"));
}

@@ -112,7 +112,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
XContentType.JSON
)
);
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 7);
}

@@ -156,7 +156,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
XContentType.JSON
)
);
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 3);
}

@@ -200,7 +200,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
XContentType.JSON
)
);
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 3);
}

@@ -244,7 +244,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
XContentType.JSON
)
);
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 3);
}

@@ -328,7 +328,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
XContentType.JSON
)
);
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 3);
}

@@ -370,7 +370,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
XContentType.JSON
)
);
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 3);
}

@@ -449,7 +449,7 @@ public class CategoryContextMappingTests extends MapperServiceTestCase {
.endArray()
.endObject();
ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("1", BytesReference.bytes(builder), XContentType.JSON));
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
List<IndexableField> fields = parsedDocument.rootDoc().getFields(fieldMapper.fullPath());
assertContextSuggestFields(fields, 3);
}

@@ -1039,13 +1039,13 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
return "All fields that match routing_path must be configured with [time_series_dimension: true] "
+ "or flattened fields with a list of dimensions in [time_series_dimensions] and "
+ "without the [script] parameter. ["
+ mapper.name()
+ mapper.fullPath()
+ "] was not a dimension.";
}
return "All fields that match routing_path must be configured with [time_series_dimension: true] "
+ "or flattened fields with a list of dimensions in [time_series_dimensions] and "
+ "without the [script] parameter. ["
+ mapper.name()
+ mapper.fullPath()
+ "] was ["
+ mapper.typeName()
+ "].";

@@ -334,7 +334,7 @@ public class HistogramFieldMapper extends FieldMapper {
throw new DocumentParsingException(
subParser.getTokenLocation(),
"error parsing field ["
+ name()
+ fullPath()
+ "], ["
+ VALUES_FIELD
+ "] values must be in increasing order, got ["

@@ -363,7 +363,7 @@ public class HistogramFieldMapper extends FieldMapper {
} else {
throw new DocumentParsingException(
subParser.getTokenLocation(),
"error parsing field [" + name() + "], with unknown parameter [" + fieldName + "]"
"error parsing field [" + fullPath() + "], with unknown parameter [" + fieldName + "]"
);
}
token = subParser.nextToken();

@@ -371,20 +371,20 @@ public class HistogramFieldMapper extends FieldMapper {
if (values == null) {
throw new DocumentParsingException(
subParser.getTokenLocation(),
"error parsing field [" + name() + "], expected field called [" + VALUES_FIELD.getPreferredName() + "]"
"error parsing field [" + fullPath() + "], expected field called [" + VALUES_FIELD.getPreferredName() + "]"
);
}
if (counts == null) {
throw new DocumentParsingException(
subParser.getTokenLocation(),
"error parsing field [" + name() + "], expected field called [" + COUNTS_FIELD.getPreferredName() + "]"
"error parsing field [" + fullPath() + "], expected field called [" + COUNTS_FIELD.getPreferredName() + "]"
);
}
if (values.size() != counts.size()) {
throw new DocumentParsingException(
subParser.getTokenLocation(),
"error parsing field ["
+ name()
+ fullPath()
+ "], expected same length from ["
+ VALUES_FIELD.getPreferredName()
+ "] and "

@@ -403,7 +403,7 @@ public class HistogramFieldMapper extends FieldMapper {
if (count < 0) {
throw new DocumentParsingException(
subParser.getTokenLocation(),
"error parsing field [" + name() + "], [" + COUNTS_FIELD + "] elements must be >= 0 but got " + counts.get(i)
"error parsing field [" + fullPath() + "], [" + COUNTS_FIELD + "] elements must be >= 0 but got " + counts.get(i)
);
} else if (count > 0) {
// we do not add elements with count == 0

@@ -416,11 +416,11 @@ public class HistogramFieldMapper extends FieldMapper {
}
}
BytesRef docValue = streamOutput.bytes().toBytesRef();
Field field = new BinaryDocValuesField(name(), docValue);
Field field = new BinaryDocValuesField(fullPath(), docValue);
if (context.doc().getByKey(fieldType().name()) != null) {
throw new IllegalArgumentException(
"Field ["
+ name()
+ fullPath()
+ "] of type ["
+ typeName()
+ "] doesn't support indexing multiple values for the same field in the same document"

@@ -448,7 +448,7 @@ public class HistogramFieldMapper extends FieldMapper {
}

if (malformedDataForSyntheticSource != null) {
context.doc().add(IgnoreMalformedStoredValues.storedField(name(), malformedDataForSyntheticSource));
context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), malformedDataForSyntheticSource));
}

context.addIgnoredField(fieldType().name());

@@ -516,15 +516,15 @@ public class HistogramFieldMapper extends FieldMapper {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
if (copyTo.copyToFields().isEmpty() != true) {
throw new IllegalArgumentException(
"field [" + name() + "] of type [histogram] doesn't support synthetic source because it declares copy_to"
"field [" + fullPath() + "] of type [histogram] doesn't support synthetic source because it declares copy_to"
);
}

return new CompositeSyntheticFieldLoader(
leafName(),
name(),
fullPath(),
new HistogramSyntheticFieldLoader(),
new CompositeSyntheticFieldLoader.MalformedValuesLayer(name())
new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath())
);
}

@@ -587,7 +587,7 @@ public class HistogramFieldMapper extends FieldMapper {

@Override
public String fieldName() {
return name();
return fullPath();
}

@Override

@@ -197,7 +197,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie
boolean isWithinLeaf = context.path().isWithinLeafObject();
try {
context.path().setWithinLeafObject(true);
field = SemanticTextField.parse(parser, new Tuple<>(name(), context.parser().contentType()));
field = SemanticTextField.parse(parser, new Tuple<>(fullPath(), context.parser().contentType()));
} finally {
context.path().setWithinLeafObject(isWithinLeaf);
}

@@ -242,7 +242,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie
throw new DocumentParsingException(
xContentLocation,
"Incompatible model settings for field ["
+ name()
+ fullPath()
+ "]. Check that the "
+ INFERENCE_ID_FIELD
+ " is not using different model settings",

@@ -284,12 +284,12 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie
String[] copyFields = sourcePaths.toArray(String[]::new);
// ensure consistent order
Arrays.sort(copyFields);
return new InferenceFieldMetadata(name(), fieldType().inferenceId, copyFields);
return new InferenceFieldMetadata(fullPath(), fieldType().inferenceId, copyFields);
}

@Override
public Object getOriginalValue(Map<String, Object> sourceAsMap) {
Object fieldValue = sourceAsMap.get(name());
Object fieldValue = sourceAsMap.get(fullPath());
if (fieldValue == null) {
return null;
} else if (fieldValue instanceof Map<?, ?> == false) {

@@ -297,7 +297,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie
return fieldValue;
}

Map<String, Object> fieldValueMap = XContentMapValues.nodeMapValue(fieldValue, "Field [" + name() + "]");
Map<String, Object> fieldValueMap = XContentMapValues.nodeMapValue(fieldValue, "Field [" + fullPath() + "]");
return XContentMapValues.extractValue(TEXT_FIELD, fieldValueMap);
}

@@ -314,7 +314,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase {
.getNestedMappers()
.get(getChunksFieldName(fieldName));
assertThat(chunksMapper, equalTo(semanticFieldMapper.fieldType().getChunksField()));
assertThat(chunksMapper.name(), equalTo(getChunksFieldName(fieldName)));
assertThat(chunksMapper.fullPath(), equalTo(getChunksFieldName(fieldName)));
Mapper textMapper = chunksMapper.getMapper(CHUNKED_TEXT_FIELD);
assertNotNull(textMapper);
assertThat(textMapper, instanceOf(KeywordFieldMapper.class));

@@ -328,7 +328,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase {
assertThat(embeddingsMapper, instanceOf(FieldMapper.class));
FieldMapper embeddingsFieldMapper = (FieldMapper) embeddingsMapper;
assertTrue(embeddingsFieldMapper.fieldType() == mapperService.mappingLookup().getFieldType(getEmbeddingsFieldName(fieldName)));
assertThat(embeddingsMapper.name(), equalTo(getEmbeddingsFieldName(fieldName)));
assertThat(embeddingsMapper.fullPath(), equalTo(getEmbeddingsFieldName(fieldName)));
switch (semanticFieldMapper.fieldType().getModelSettings().taskType()) {
case SPARSE_EMBEDDING -> assertThat(embeddingsMapper, instanceOf(SparseVectorFieldMapper.class));
case TEXT_EMBEDDING -> assertThat(embeddingsMapper, instanceOf(DenseVectorFieldMapper.class));

@@ -665,7 +665,7 @@ public class AggregateDoubleMetricFieldMapper extends FieldMapper {
if (context.doc().getByKey(delegateFieldMapper.fieldType().name()) != null) {
throw new IllegalArgumentException(
"Field ["
+ name()
+ fullPath()
+ "] of type ["
+ typeName()
+ "] does not support indexing multiple values for the same field in the same document"

@@ -687,10 +687,10 @@ public class AggregateDoubleMetricFieldMapper extends FieldMapper {
}

if (malformedDataForSyntheticSource != null) {
context.doc().add(IgnoreMalformedStoredValues.storedField(name(), malformedDataForSyntheticSource));
context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), malformedDataForSyntheticSource));
}

context.addIgnoredField(name());
context.addIgnoredField(fullPath());
context.path().remove();
return;
}

@@ -719,9 +719,9 @@ public class AggregateDoubleMetricFieldMapper extends FieldMapper {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
return new CompositeSyntheticFieldLoader(
leafName(),
name(),
new AggregateMetricSyntheticFieldLoader(name(), leafName(), metrics),
new CompositeSyntheticFieldLoader.MalformedValuesLayer(name())
fullPath(),
new AggregateMetricSyntheticFieldLoader(fullPath(), leafName(), metrics),
new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath())
);
}

@@ -316,7 +316,7 @@ public class ConstantKeywordFieldMapper extends FieldMapper {
         final String value = parser.textOrNull();

         if (value == null) {
-            throw new IllegalArgumentException("[constant_keyword] field [" + name() + "] doesn't accept [null] values");
+            throw new IllegalArgumentException("[constant_keyword] field [" + fullPath() + "] doesn't accept [null] values");
         }

         if (fieldType().value == null) {

@@ -328,7 +328,7 @@ public class ConstantKeywordFieldMapper extends FieldMapper {
         } else if (Objects.equals(fieldType().value, value) == false) {
             throw new IllegalArgumentException(
                 "[constant_keyword] field ["
-                    + name()
+                    + fullPath()
                    + "] only accepts values that are equal to the value defined in the mappings ["
                    + fieldType().value()
                    + "], but got ["

@@ -380,7 +380,7 @@ public class ConstantKeywordFieldMapper extends FieldMapper {

             @Override
             public String fieldName() {
-                return name();
+                return fullPath();
             }
         };
     }

@@ -354,12 +354,12 @@ public class CountedKeywordFieldMapper extends FieldMapper {
         int i = 0;
         int[] counts = new int[values.size()];
         for (Map.Entry<String, Integer> value : values.entrySet()) {
-            context.doc().add(new KeywordFieldMapper.KeywordField(name(), new BytesRef(value.getKey()), fieldType));
+            context.doc().add(new KeywordFieldMapper.KeywordField(fullPath(), new BytesRef(value.getKey()), fieldType));
             counts[i++] = value.getValue();
         }
         BytesStreamOutput streamOutput = new BytesStreamOutput();
         streamOutput.writeVIntArray(counts);
-        context.doc().add(new BinaryDocValuesField(countFieldMapper.name(), streamOutput.bytes().toBytesRef()));
+        context.doc().add(new BinaryDocValuesField(countFieldMapper.fullPath(), streamOutput.bytes().toBytesRef()));
     }

     private void parseArray(DocumentParserContext context, SortedMap<String, Integer> values) throws IOException {

@@ -636,7 +636,7 @@ public class UnsignedLongFieldMapper extends FieldMapper {
                 context.addIgnoredField(mappedFieldType.name());
                 if (isSourceSynthetic) {
                     // Save a copy of the field so synthetic source can load it
-                    context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+                    context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
                 }
                 return;
             } else {

@@ -753,9 +753,9 @@ public class UnsignedLongFieldMapper extends FieldMapper {

     @Override
     public void doValidate(MappingLookup lookup) {
-        if (dimension && null != lookup.nestedLookup().getNestedParent(name())) {
+        if (dimension && null != lookup.nestedLookup().getNestedParent(fullPath())) {
             throw new IllegalArgumentException(
-                TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]"
+                TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + fullPath() + "]"
             );
         }
     }

@@ -769,15 +769,19 @@ public class UnsignedLongFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (hasDocValues == false) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+                "field ["
+                    + fullPath()
+                    + "] of type ["
+                    + typeName()
+                    + "] doesn't support synthetic source because it doesn't have doc values"
             );
         }
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
-        return new SortedNumericDocValuesSyntheticFieldLoader(name(), leafName(), ignoreMalformed()) {
+        return new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed()) {
             @Override
             protected void writeValue(XContentBuilder b, long value) throws IOException {
                 b.value(DocValueFormat.UNSIGNED_LONG_SHIFTED.format(value));

@@ -454,10 +454,10 @@ public class VersionStringFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
-        return new SortedSetDocValuesSyntheticFieldLoader(name(), leafName(), null, false) {
+        return new SortedSetDocValuesSyntheticFieldLoader(fullPath(), leafName(), null, false) {
             @Override
             protected BytesRef convert(BytesRef value) {
                 return VersionEncoder.decodeVersion(value);

@@ -476,7 +476,7 @@ public class GeoShapeWithDocValuesFieldMapper extends AbstractShapeGeometryField
     protected void checkIncomingMergeType(FieldMapper mergeWith) {
         if (mergeWith instanceof GeoShapeWithDocValuesFieldMapper == false && CONTENT_TYPE.equals(mergeWith.typeName())) {
             throw new IllegalArgumentException(
-                "mapper [" + name() + "] of type [geo_shape] cannot change strategy from [BKD] to [recursive]"
+                "mapper [" + fullPath() + "] of type [geo_shape] cannot change strategy from [BKD] to [recursive]"
             );
         }
         super.checkIncomingMergeType(mergeWith);

@@ -949,7 +949,7 @@ public class WildcardFieldMapper extends FieldMapper {
         if (value.length() <= ignoreAbove) {
             createFields(value, parseDoc, fields);
         } else {
-            context.addIgnoredField(name());
+            context.addIgnoredField(fullPath());
             if (storeIgnored) {
                 parseDoc.add(new StoredField(originalName(), new BytesRef(value)));
             }

@@ -959,7 +959,7 @@ public class WildcardFieldMapper extends FieldMapper {
     }

     private String originalName() {
-        return name() + "._original";
+        return fullPath() + "._original";
     }

     void createFields(String value, LuceneDocument parseDoc, List<IndexableField> fields) {

@@ -1000,7 +1000,7 @@ public class WildcardFieldMapper extends FieldMapper {
     public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
         if (copyTo.copyToFields().isEmpty() != true) {
             throw new IllegalArgumentException(
-                "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+                "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
             );
         }
         return new WildcardSyntheticFieldLoader();

@@ -1023,7 +1023,7 @@ public class WildcardFieldMapper extends FieldMapper {

         @Override
         public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException {
-            BinaryDocValues values = leafReader.getBinaryDocValues(name());
+            BinaryDocValues values = leafReader.getBinaryDocValues(fullPath());
             if (values == null) {
                 docValueCount = 0;
                 return null;

@@ -1075,7 +1075,7 @@ public class WildcardFieldMapper extends FieldMapper {

         @Override
         public String fieldName() {
-            return name();
+            return fullPath();
         }
     }
 }

@@ -54,7 +54,7 @@ public class WildcardFieldAggregationTests extends AggregatorTestCase {
     }

     private void indexDoc(LuceneDocument parseDoc, Document doc, RandomIndexWriter iw) throws IOException {
-        IndexableField field = parseDoc.getByKey(wildcardFieldMapper.name());
+        IndexableField field = parseDoc.getByKey(wildcardFieldMapper.fullPath());
         if (field != null) {
             doc.add(field);
         }

@@ -1136,7 +1136,7 @@ public class WildcardFieldMapperTests extends MapperTestCase {
     }

     private void indexDoc(LuceneDocument parseDoc, Document doc, RandomIndexWriter iw) throws IOException {
-        IndexableField field = parseDoc.getByKey(wildcardFieldType.name());
+        IndexableField field = parseDoc.getByKey(wildcardFieldType.fullPath());
         if (field != null) {
             doc.add(field);
         }