Move Text class to libs/xcontent (#128780)

This PR is a precursor to #126492.

It does three things:
1. Move org.elasticsearch.common.text.Text from :server to
   org.elasticsearch.xcontent.Text in :libs:x-content.
2. Refactor the Text class to use the new immutable UTF8Bytes record instead of
   the Elasticsearch BytesReference.
3. Add the XContentString interface, with the Text class implementing
   that interface.

These changes were originally implemented in #127666 and #128316, but
they were reverted in #128484 due to problems caused by the mutable
nature of Java ByteBuffers. That issue is resolved here by instead using
the new immutable UTF8Bytes record backed by a plain byte array.
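For reference, a minimal sketch of how the relocated class behaves after this change, using only the constructors and accessors introduced in this diff; the class name and assertion-style checks are illustrative, not part of the commit.

```java
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.XContentString;

import java.nio.charset.StandardCharsets;

public class TextUsageSketch {
    public static void main(String[] args) {
        // Start from a String: the UTF-8 bytes are only materialized on demand and then cached.
        Text fromString = new Text("caf\u00e9");
        assert fromString.hasString() && fromString.hasBytes() == false;
        XContentString.UTF8Bytes encoded = fromString.bytes(); // encodes once, caches the result
        assert fromString.hasBytes();

        // Start from already-encoded UTF-8 bytes: the String view is decoded lazily.
        byte[] utf8 = "caf\u00e9".getBytes(StandardCharsets.UTF_8);
        Text fromBytes = new Text(new XContentString.UTF8Bytes(utf8));
        assert fromBytes.hasString() == false;
        assert fromBytes.string().equals("caf\u00e9"); // decodes once, caches the result
        assert fromBytes.stringLength() == 4;          // 4 characters, but 5 UTF-8 bytes

        // When the character count is already known, pass it in to skip the decode in stringLength().
        Text withLength = new Text(new XContentString.UTF8Bytes(utf8), 4);
        assert withLength.stringLength() == 4;
    }
}
```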
Jordan Powers 2025-06-04 11:22:03 -07:00 committed by GitHub
parent 767d53fefa
commit de40ac45d1
42 changed files with 373 additions and 79 deletions


@ -6,22 +6,17 @@
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.common.text;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.XContentBuilder;
package org.elasticsearch.xcontent;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
/**
* Both {@link String} and {@link BytesReference} representation of the text. Starts with one of those, and if
* the other is requests, caches the other one in a local reference so no additional conversion will be needed.
* Both {@link String} and {@link UTF8Bytes} representation of the text. Starts with one of those, and if
* the other is requested, caches the other one in a local reference so no additional conversion will be needed.
*/
public final class Text implements Comparable<Text>, ToXContentFragment {
public final class Text implements XContentString, Comparable<Text>, ToXContentFragment {
public static final Text[] EMPTY_ARRAY = new Text[0];
@ -36,31 +31,46 @@ public final class Text implements Comparable<Text>, ToXContentFragment {
return texts;
}
private BytesReference bytes;
private String text;
private UTF8Bytes bytes;
private String string;
private int hash;
private int stringLength = -1;
public Text(BytesReference bytes) {
/**
* Construct a Text from encoded UTF8Bytes. Since no string length is specified, {@link #stringLength()}
* will perform a string conversion to measure the string length.
*/
public Text(UTF8Bytes bytes) {
this.bytes = bytes;
}
public Text(String text) {
this.text = text;
/**
* Construct a Text from encoded UTF8Bytes and an explicit string length. Used to avoid string conversion
* in {@link #stringLength()}. The provided stringLength should match the value that would
* be calculated by {@link Text#Text(UTF8Bytes)}.
*/
public Text(UTF8Bytes bytes, int stringLength) {
this.bytes = bytes;
this.stringLength = stringLength;
}
public Text(String string) {
this.string = string;
}
/**
* Whether a {@link BytesReference} view of the data is already materialized.
* Whether an {@link UTF8Bytes} view of the data is already materialized.
*/
public boolean hasBytes() {
return bytes != null;
}
/**
* Returns a {@link BytesReference} view of the data.
*/
public BytesReference bytes() {
@Override
public UTF8Bytes bytes() {
if (bytes == null) {
bytes = new BytesArray(text.getBytes(StandardCharsets.UTF_8));
var byteBuff = StandardCharsets.UTF_8.encode(string);
assert byteBuff.hasArray();
bytes = new UTF8Bytes(byteBuff.array(), byteBuff.arrayOffset() + byteBuff.position(), byteBuff.remaining());
}
return bytes;
}
@ -69,14 +79,25 @@ public final class Text implements Comparable<Text>, ToXContentFragment {
* Whether a {@link String} view of the data is already materialized.
*/
public boolean hasString() {
return text != null;
return string != null;
}
/**
* Returns a {@link String} view of the data.
*/
@Override
public String string() {
return text == null ? bytes.utf8ToString() : text;
if (string == null) {
var byteBuff = ByteBuffer.wrap(bytes.bytes(), bytes.offset(), bytes.length());
string = StandardCharsets.UTF_8.decode(byteBuff).toString();
assert (stringLength < 0) || (string.length() == stringLength);
}
return string;
}
@Override
public int stringLength() {
if (stringLength < 0) {
stringLength = string().length();
}
return stringLength;
}
@Override
@ -115,8 +136,7 @@ public final class Text implements Comparable<Text>, ToXContentFragment {
} else {
// TODO: TextBytesOptimization we can use a buffer here to convert it? maybe add a
// request to jackson to support InputStream as well?
BytesRef br = this.bytes().toBytesRef();
return builder.utf8Value(br.bytes, br.offset, br.length);
return builder.utf8Value(bytes.bytes(), bytes.offset(), bytes.length());
}
}
}
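A note on the offset arithmetic in the new bytes() method above: Charset.encode returns a heap ByteBuffer, and under the general ByteBuffer contract the readable region need not start at index 0 of the backing array, hence arrayOffset() + position() and remaining(). A standalone sketch of that logic, independent of the Elasticsearch classes and purely illustrative:

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

class EncodeOffsetSketch {
    public static void main(String[] args) {
        String s = "\u00e9l\u00e8ve"; // contains multi-byte UTF-8 characters
        ByteBuffer buf = StandardCharsets.UTF_8.encode(s);
        assert buf.hasArray(); // Charset.encode allocates a heap buffer, so the array is accessible

        byte[] backing = buf.array();
        int offset = buf.arrayOffset() + buf.position(); // first readable byte within the backing array
        int length = buf.remaining();                    // number of encoded bytes
        System.out.println("chars=" + s.length() + " utf8Bytes=" + length);

        // The (array, offset, length) triple is what UTF8Bytes captures, avoiding an extra copy.
        byte[] copy = new byte[length];
        System.arraycopy(backing, offset, copy, 0, length);
        assert new String(copy, StandardCharsets.UTF_8).equals(s);
    }
}
```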


@ -0,0 +1,61 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.xcontent;
import java.nio.ByteBuffer;
public interface XContentString {
record UTF8Bytes(byte[] bytes, int offset, int length) implements Comparable<UTF8Bytes> {
public UTF8Bytes(byte[] bytes) {
this(bytes, 0, bytes.length);
}
@Override
public int compareTo(UTF8Bytes o) {
if (this.bytes == o.bytes && this.offset == o.offset && this.length == o.length) {
return 0;
}
return ByteBuffer.wrap(bytes, offset, length).compareTo(ByteBuffer.wrap(o.bytes, o.offset, o.length));
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
return this.compareTo((UTF8Bytes) o) == 0;
}
@Override
public int hashCode() {
return ByteBuffer.wrap(bytes, offset, length).hashCode();
}
}
/**
* Returns a {@link String} view of the data.
*/
String string();
/**
* Returns an encoded {@link UTF8Bytes} view of the data.
*/
UTF8Bytes bytes();
/**
* Returns the number of characters in the represented string.
*/
int stringLength();
}
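Because UTF8Bytes has a byte[] component, a record's default equals() and hashCode() would fall back to reference comparison of that array; the explicit overrides above make all comparisons content-based via ByteBuffer views. A small illustrative sketch, with names of our own:

```java
import org.elasticsearch.xcontent.XContentString.UTF8Bytes;

import java.nio.charset.StandardCharsets;

class UTF8BytesSketch {
    public static void main(String[] args) {
        byte[] a = "hello".getBytes(StandardCharsets.UTF_8);
        byte[] b = "hello".getBytes(StandardCharsets.UTF_8);

        // Distinct backing arrays with equal content compare equal and hash identically.
        assert new UTF8Bytes(a).equals(new UTF8Bytes(b));
        assert new UTF8Bytes(a).hashCode() == new UTF8Bytes(b).hashCode();

        // Offset/length select a slice, so a view into a larger array is interchangeable too.
        byte[] padded = "__hello__".getBytes(StandardCharsets.UTF_8);
        assert new UTF8Bytes(padded, 2, 5).equals(new UTF8Bytes(a));

        // Ordering follows ByteBuffer.compareTo, i.e. lexicographic over the byte content.
        assert new UTF8Bytes("abc".getBytes(StandardCharsets.UTF_8))
            .compareTo(new UTF8Bytes("abd".getBytes(StandardCharsets.UTF_8))) < 0;
    }
}
```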


@ -0,0 +1,190 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.xcontent;
import org.elasticsearch.test.ESTestCase;
import java.nio.charset.StandardCharsets;
public class TextTests extends ESTestCase {
public void testConvertToBytes() {
String value = randomUnicodeOfLength(randomInt(128));
byte[] encodedArr = value.getBytes(StandardCharsets.UTF_8);
var encoded = new XContentString.UTF8Bytes(encodedArr);
var text = new Text(value);
assertTrue(text.hasString());
assertFalse(text.hasBytes());
assertEquals(value, text.string());
assertEquals(encoded, text.bytes());
assertTrue(text.hasString());
assertTrue(text.hasBytes());
// Ensure the conversion didn't mess up subsequent calls
assertEquals(value, text.string());
assertEquals(encoded, text.bytes());
assertSame(text.bytes(), text.bytes());
}
public void testConvertToString() {
String value = randomUnicodeOfLength(randomInt(128));
byte[] encodedArr = value.getBytes(StandardCharsets.UTF_8);
var encoded = new XContentString.UTF8Bytes(encodedArr);
var text = new Text(encoded);
assertFalse(text.hasString());
assertTrue(text.hasBytes());
assertEquals(value, text.string());
assertEquals(encoded, text.bytes());
assertTrue(text.hasString());
assertTrue(text.hasBytes());
// Ensure the conversion didn't mess up subsequent calls
assertEquals(value, text.string());
assertEquals(encoded, text.bytes());
assertSame(encoded, text.bytes());
}
public void testStringLength() {
int stringLength = randomInt(128);
String value = randomUnicodeOfLength(stringLength);
byte[] encodedArr = value.getBytes(StandardCharsets.UTF_8);
var encoded = new XContentString.UTF8Bytes(encodedArr);
{
var text = new Text(value);
assertTrue(text.hasString());
assertEquals(stringLength, text.stringLength());
}
{
var text = new Text(encoded);
assertFalse(text.hasString());
assertEquals(stringLength, text.stringLength());
assertTrue(text.hasString());
}
{
var text = new Text(encoded, stringLength);
assertFalse(text.hasString());
assertEquals(stringLength, text.stringLength());
assertFalse(text.hasString());
}
}
public void testEquals() {
String value = randomUnicodeOfLength(randomInt(128));
byte[] encodedArr = value.getBytes(StandardCharsets.UTF_8);
var encoded = new XContentString.UTF8Bytes(encodedArr);
{
var text1 = new Text(value);
var text2 = new Text(value);
assertTrue(text1.equals(text2));
}
{
var text1 = new Text(value);
var text2 = new Text(encoded);
assertTrue(text1.equals(text2));
}
{
var text1 = new Text(encoded);
var text2 = new Text(encoded);
assertTrue(text1.equals(text2));
}
}
public void testCompareTo() {
String value1 = randomUnicodeOfLength(randomInt(128));
byte[] encodedArr1 = value1.getBytes(StandardCharsets.UTF_8);
var encoded1 = new XContentString.UTF8Bytes(encodedArr1);
{
var text1 = new Text(value1);
var text2 = new Text(value1);
assertEquals(0, text1.compareTo(text2));
}
{
var text1 = new Text(value1);
var text2 = new Text(encoded1);
assertEquals(0, text1.compareTo(text2));
}
{
var text1 = new Text(encoded1);
var text2 = new Text(encoded1);
assertEquals(0, text1.compareTo(text2));
}
String value2 = randomUnicodeOfLength(randomInt(128));
byte[] encodedArr2 = value2.getBytes(StandardCharsets.UTF_8);
var encoded2 = new XContentString.UTF8Bytes(encodedArr2);
int compSign = (int) Math.signum(encoded1.compareTo(encoded2));
{
var text1 = new Text(value1);
var text2 = new Text(value2);
assertEquals(compSign, (int) Math.signum(text1.compareTo(text2)));
}
{
var text1 = new Text(value1);
var text2 = new Text(encoded2);
assertEquals(compSign, (int) Math.signum(text1.compareTo(text2)));
}
{
var text1 = new Text(encoded1);
var text2 = new Text(value2);
assertEquals(compSign, (int) Math.signum(text1.compareTo(text2)));
}
{
var text1 = new Text(encoded1);
var text2 = new Text(encoded2);
assertEquals(compSign, (int) Math.signum(text1.compareTo(text2)));
}
}
public void testRandomized() {
int stringLength = randomInt(128);
String value = randomUnicodeOfLength(stringLength);
byte[] encodedArr = value.getBytes(StandardCharsets.UTF_8);
var encoded = new XContentString.UTF8Bytes(encodedArr);
Text text = switch (randomInt(2)) {
case 0 -> new Text(value);
case 1 -> new Text(encoded);
default -> new Text(encoded, stringLength);
};
for (int i = 0; i < 20; i++) {
switch (randomInt(5)) {
case 0 -> assertEquals(encoded, text.bytes());
case 1 -> assertSame(text.bytes(), text.bytes());
case 2 -> assertEquals(value, text.string());
case 3 -> assertEquals(value, text.toString());
case 4 -> assertEquals(stringLength, text.stringLength());
case 5 -> assertEquals(new Text(value), text);
}
}
}
}


@ -11,7 +11,7 @@ package org.elasticsearch.example.customsuggester;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.Suggester;


@ -11,7 +11,7 @@ package org.elasticsearch.example.customsuggester;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.XContentBuilder;


@ -36,7 +36,6 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.CountDown;
@ -56,6 +55,7 @@ import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.threadpool.Scheduler;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xcontent.Text;
import java.util.ArrayList;
import java.util.Collections;


@ -13,8 +13,8 @@ import org.elasticsearch.common.Priority;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.xcontent.Text;
import java.io.IOException;


@ -23,12 +23,13 @@ import org.elasticsearch.common.bytes.ReleasableBytesReference;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.Maps;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.core.CharArrays;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.XContentString;
import java.io.EOFException;
import java.io.FilterInputStream;
@ -385,19 +386,28 @@ public abstract class StreamInput extends InputStream {
return new BigInteger(readString());
}
private Text readText(int length) throws IOException {
byte[] bytes = new byte[length];
if (length > 0) {
readBytes(bytes, 0, length);
}
var encoded = new XContentString.UTF8Bytes(bytes);
return new Text(encoded);
}
@Nullable
public Text readOptionalText() throws IOException {
int length = readInt();
if (length == -1) {
return null;
}
return new Text(readBytesReference(length));
return readText(length);
}
public Text readText() throws IOException {
// use StringAndBytes so we can cache the string if it's ever converted to it
// use Text so we can cache the string if it's ever converted to it
int length = readInt();
return new Text(readBytesReference(length));
return readText(length);
}
@Nullable


@ -22,11 +22,11 @@ import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.Writeable.Writer;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.ByteUtils;
import org.elasticsearch.core.CharArrays;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
@ -419,7 +419,8 @@ public abstract class StreamOutput extends OutputStream {
writeInt(spare.length());
write(spare.bytes(), 0, spare.length());
} else {
BytesReference bytes = text.bytes();
var encoded = text.bytes();
BytesReference bytes = new BytesArray(encoded.bytes(), encoded.offset(), encoded.length());
writeInt(bytes.length());
bytes.writeTo(this);
}
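Taken together, the StreamInput and StreamOutput changes preserve the existing wire format for Text: an int giving the UTF-8 byte length (or -1 for a missing optional value) followed by the raw bytes; only the in-memory representation changes. A rough round-trip sketch, assuming the usual BytesStreamOutput helper used elsewhere in the codebase:

```java
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.xcontent.Text;

import java.io.IOException;

class TextWireSketch {
    static void roundTrip() throws IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.writeText(new Text("foo bar")); // int byte length, then the UTF-8 bytes
            out.writeOptionalText(null);        // -1 marks a missing optional value
            try (StreamInput in = out.bytes().streamInput()) {
                Text restored = in.readText();  // bytes wrapped in UTF8Bytes; String decoded lazily
                assert "foo bar".equals(restored.string());
                assert in.readOptionalText() == null;
            }
        }
    }
}
```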


@ -20,7 +20,6 @@ import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.Maps;
import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.common.xcontent.XContentHelper;
@ -39,6 +38,7 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.transport.LeakTracker;
import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;


@ -12,10 +12,10 @@ package org.elasticsearch.search;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.xcontent.Text;
import java.io.IOException;
import java.util.Objects;


@ -21,7 +21,6 @@ import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
@ -34,6 +33,7 @@ import org.elasticsearch.lucene.search.uhighlight.QueryMaxAnalyzedOffset;
import org.elasticsearch.lucene.search.uhighlight.Snippet;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.xcontent.Text;
import java.io.IOException;
import java.text.BreakIterator;


@ -23,7 +23,6 @@ import org.apache.lucene.search.vectorhighlight.SimpleFragListBuilder;
import org.apache.lucene.search.vectorhighlight.SingleFragListBuilder;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.TextSearchInfo;
@ -33,6 +32,7 @@ import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext.Field;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext.FieldOptions;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.xcontent.Text;
import java.io.IOException;
import java.text.BreakIterator;


@ -12,7 +12,7 @@ package org.elasticsearch.search.fetch.subphase.highlight;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.XContentBuilder;


@ -24,12 +24,12 @@ import org.apache.lucene.search.highlight.SimpleSpanFragmenter;
import org.apache.lucene.search.highlight.TextFragment;
import org.apache.lucene.util.BytesRefHash;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.lucene.search.uhighlight.QueryMaxAnalyzedOffset;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.xcontent.Text;
import java.io.IOException;
import java.util.ArrayList;


@ -19,12 +19,12 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.sort.SortAndFormats;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;


@ -14,13 +14,13 @@ import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.XContentBuilder;


@ -19,10 +19,10 @@ import org.apache.lucene.search.suggest.document.CompletionQuery;
import org.apache.lucene.search.suggest.document.TopSuggestDocs;
import org.apache.lucene.search.suggest.document.TopSuggestDocsCollector;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.xcontent.Text;
import java.io.IOException;
import java.util.Collections;


@ -14,12 +14,12 @@ import org.apache.lucene.util.PriorityQueue;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.Maps;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;


@ -19,7 +19,6 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryBuilder;
@ -31,6 +30,7 @@ import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.search.suggest.phrase.NoisyChannelSpellChecker.Result;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser;


@ -11,9 +11,9 @@ package org.elasticsearch.search.suggest.phrase;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.Suggest.Suggestion;
import org.elasticsearch.xcontent.Text;
import java.io.IOException;
import java.util.Objects;


@ -16,11 +16,11 @@ import org.apache.lucene.search.spell.SuggestWord;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.XContentString;
import java.io.IOException;
import java.util.ArrayList;
@ -47,7 +47,9 @@ public final class TermSuggester extends Suggester<TermSuggestionContext> {
indexReader,
suggestion.getDirectSpellCheckerSettings().suggestMode()
);
Text key = new Text(new BytesArray(token.term.bytes()));
var termBytes = token.term.bytes();
var termEncoded = new XContentString.UTF8Bytes(termBytes.bytes, termBytes.offset, termBytes.length);
Text key = new Text(termEncoded);
TermSuggestion.Entry resultEntry = new TermSuggestion.Entry(key, token.startOffset, token.endOffset - token.startOffset);
for (SuggestWord suggestWord : suggestedWords) {
Text word = new Text(suggestWord.string);
@ -96,7 +98,9 @@ public final class TermSuggester extends Suggester<TermSuggestionContext> {
TermSuggestion termSuggestion = new TermSuggestion(name, suggestion.getSize(), suggestion.getDirectSpellCheckerSettings().sort());
List<Token> tokens = queryTerms(suggestion, spare);
for (Token token : tokens) {
Text key = new Text(new BytesArray(token.term.bytes()));
var termBytes = token.term.bytes();
var termEncoded = new XContentString.UTF8Bytes(termBytes.bytes, termBytes.offset, termBytes.length);
Text key = new Text(termEncoded);
TermSuggestion.Entry resultEntry = new TermSuggestion.Entry(key, token.startOffset, token.endOffset - token.startOffset);
termSuggestion.addTerm(resultEntry);
}
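Both call sites above repeat the same Lucene BytesRef to UTF8Bytes bridge; a hypothetical helper capturing the pattern (not part of this commit) could look like this:

```java
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.XContentString;

final class TextBridge {
    private TextBridge() {}

    /** Wraps a Lucene BytesRef as a Text without copying the underlying UTF-8 bytes. */
    static Text fromBytesRef(BytesRef term) {
        // BytesRef exposes its backing array plus offset/length, which map directly onto UTF8Bytes.
        return new Text(new XContentString.UTF8Bytes(term.bytes, term.offset, term.length));
    }
}
```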


@ -10,12 +10,12 @@ package org.elasticsearch.search.suggest.term;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.suggest.SortBy;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.Suggest.Suggestion;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;


@ -11,9 +11,9 @@ package org.elasticsearch.action.admin.cluster.tasks;
import org.elasticsearch.cluster.service.PendingClusterTask;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.test.AbstractChunkedSerializingTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.Text;
import java.util.ArrayList;


@ -30,7 +30,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.common.util.concurrent.EsExecutors;
@ -71,6 +70,7 @@ import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xcontent.Text;
import org.junit.After;
import org.junit.Before;


@ -13,7 +13,6 @@ import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TotalHits;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.Index;
@ -40,6 +39,7 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.transport.RemoteClusterService;
import org.elasticsearch.xcontent.Text;
import org.junit.Before;
import java.time.ZoneId;


@ -16,13 +16,13 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.core.PathUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.NamedObjectNotFoundException;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContent;
@ -34,6 +34,7 @@ import org.elasticsearch.xcontent.XContentParseException;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParser.Token;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentString;
import org.elasticsearch.xcontent.XContentType;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
@ -43,6 +44,7 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.time.DayOfWeek;
@ -376,15 +378,18 @@ public abstract class BaseXContentTestCase extends ESTestCase {
assertResult("{'text':''}", () -> builder().startObject().field("text", new Text("")).endObject());
assertResult("{'text':'foo bar'}", () -> builder().startObject().field("text", new Text("foo bar")).endObject());
final BytesReference random = new BytesArray(randomBytes());
XContentBuilder builder = builder().startObject().field("text", new Text(random)).endObject();
final var random = randomBytes();
XContentBuilder builder = builder().startObject().field("text", new Text(new XContentString.UTF8Bytes(random))).endObject();
try (XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder))) {
assertSame(parser.nextToken(), Token.START_OBJECT);
assertSame(parser.nextToken(), Token.FIELD_NAME);
assertEquals(parser.currentName(), "text");
assertTrue(parser.nextToken().isValue());
assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(random.utf8ToString()));
assertThat(
new BytesRef(parser.charBuffer()).utf8ToString(),
equalTo(StandardCharsets.UTF_8.decode(ByteBuffer.wrap(random)).toString())
);
assertSame(parser.nextToken(), Token.END_OBJECT);
assertNull(parser.nextToken());
}
@ -590,7 +595,10 @@ public abstract class BaseXContentTestCase extends ESTestCase {
objects.put("{'objects':[1,1,2,3,5,8,13]}", new Object[] { 1L, 1L, 2L, 3L, 5L, 8L, 13L });
objects.put("{'objects':[1,1,2,3,5,8]}", new Object[] { (short) 1, (short) 1, (short) 2, (short) 3, (short) 5, (short) 8 });
objects.put("{'objects':['a','b','c']}", new Object[] { "a", "b", "c" });
objects.put("{'objects':['a','b','c']}", new Object[] { new Text("a"), new Text(new BytesArray("b")), new Text("c") });
objects.put(
"{'objects':['a','b','c']}",
new Object[] { new Text("a"), new Text(new XContentString.UTF8Bytes("b".getBytes(StandardCharsets.UTF_8))), new Text("c") }
);
objects.put("{'objects':null}", null);
objects.put("{'objects':[null,null,null]}", new Object[] { null, null, null });
objects.put("{'objects':['OPEN','CLOSE']}", IndexMetadata.State.values());
@ -636,7 +644,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
object.put("{'object':1}", (short) 1);
object.put("{'object':'string'}", "string");
object.put("{'object':'a'}", new Text("a"));
object.put("{'object':'b'}", new Text(new BytesArray("b")));
object.put("{'object':'b'}", new Text(new XContentString.UTF8Bytes("b".getBytes(StandardCharsets.UTF_8))));
object.put("{'object':null}", null);
object.put("{'object':'OPEN'}", IndexMetadata.State.OPEN);
object.put("{'object':'NM'}", DistanceUnit.NAUTICALMILES);


@ -8,8 +8,8 @@
*/
package org.elasticsearch.search.fetch.subphase.highlight;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.xcontent.Text;
import java.util.ArrayList;
import java.util.List;


@ -12,9 +12,9 @@ package org.elasticsearch.search.fetch.subphase.highlight;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;


@ -44,7 +44,6 @@ import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchShardTask;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.core.CheckedConsumer;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.ParsedQuery;
@ -61,6 +60,7 @@ import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.test.TestSearchContext;
import org.elasticsearch.xcontent.Text;
import org.hamcrest.Matchers;
import org.junit.AfterClass;
import org.junit.BeforeClass;

View file

@ -21,7 +21,6 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.search.DocValueFormat;
@ -30,6 +29,7 @@ import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortAndFormats;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.XContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;


@ -10,7 +10,6 @@
package org.elasticsearch.search.suggest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitTests;
import org.elasticsearch.search.SearchResponseUtils;
@ -18,6 +17,7 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion.Entry.Option;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.ObjectParser;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentType;

View file

@ -17,7 +17,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.core.CheckedFunction;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.search.SearchModule;
@ -33,6 +32,7 @@ import org.elasticsearch.test.TransportVersionUtils;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.ObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;


@ -10,7 +10,6 @@
package org.elasticsearch.search.suggest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
@ -20,6 +19,7 @@ import org.elasticsearch.search.suggest.term.TermSuggestion;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.ObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentType;

View file

@ -10,11 +10,11 @@
package org.elasticsearch.search.suggest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestion;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentType;


@ -10,7 +10,6 @@
package org.elasticsearch.search.suggest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.search.suggest.Suggest.Suggestion;
@ -22,6 +21,7 @@ import org.elasticsearch.search.suggest.term.TermSuggestion;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.NamedObjectNotFoundException;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContent;
import org.elasticsearch.xcontent.XContentParser;


@ -10,11 +10,11 @@
package org.elasticsearch.search.suggest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.suggest.term.TermSuggestion;
import org.elasticsearch.search.suggest.term.TermSuggestion.Entry.Option;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentType;


@ -9,9 +9,9 @@
package org.elasticsearch.search.suggest.completion;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.Text;
import java.util.ArrayList;
import java.util.Collections;


@ -14,7 +14,6 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.query.QueryBuilders;
@ -38,6 +37,7 @@ import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.vectors.KnnSearchBuilder;
import org.elasticsearch.search.vectors.RescoreVectorBuilder;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser;


@ -24,7 +24,6 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.core.RefCounted;
@ -55,6 +54,7 @@ import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.InstantiatingObjectParser;
import org.elasticsearch.xcontent.ObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;


@ -15,7 +15,6 @@ import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.StoredFields;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperServiceTestCase;
import org.elasticsearch.index.mapper.ParsedDocument;
@ -30,6 +29,7 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase;
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
import org.elasticsearch.search.fetch.subphase.highlight.PlainHighlighter;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.xcontent.Text;
import java.io.IOException;
import java.util.ArrayList;


@ -21,7 +21,6 @@ import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.DenseVectorFieldType;
@ -34,6 +33,7 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils;
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
import org.elasticsearch.search.vectors.VectorData;
import org.elasticsearch.xcontent.Text;
import org.elasticsearch.xpack.core.ml.search.SparseVectorQueryWrapper;
import org.elasticsearch.xpack.inference.mapper.OffsetSourceField;
import org.elasticsearch.xpack.inference.mapper.OffsetSourceFieldMapper;