/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.Maps;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.RestApiVersion;
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.fetch.subphase.LookupField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ObjectParser;
import org.elasticsearch.xcontent.ObjectParser.ValueType;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParser.Token;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.lucene.Lucene.readExplanation;
import static org.elasticsearch.common.lucene.Lucene.writeExplanation;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* A single search hit.
*
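 * <p>A minimal usage sketch (hypothetical response handling; {@code searchResponse} is assumed
 * to be a previously obtained search response):
 * <pre>{@code
 * SearchHit hit = searchResponse.getHits().getAt(0);
 * String id = hit.getId();
 * Map<String, Object> source = hit.getSourceAsMap(); // null when _source was not fetched
 * }</pre>
 *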
* @see SearchHits
*/
public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
private final transient int docId;
private static final float DEFAULT_SCORE = Float.NaN;
private float score = DEFAULT_SCORE;
private final Text id;
private final NestedIdentity nestedIdentity;
private long version = -1;
private long seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
private long primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM;
private BytesReference source;
private Map<String, DocumentField> documentFields;
private final Map<String, DocumentField> metaFields;
private Map<String, HighlightField> highlightFields = null;
private SearchSortValues sortValues = SearchSortValues.EMPTY;
private String[] matchedQueries = Strings.EMPTY_ARRAY;
private Explanation explanation;
@Nullable
private SearchShardTarget shard;
// These two fields normally get set when setting the shard target, so they hold the same values as the target and thus
// don't get serialized over the wire. When parsing hits back from xcontent though, in most cases (whenever the
// explanation is disabled) we can't rebuild the shard target object, so we need to set these manually for user retrieval.
private transient String index;
private transient String clusterAlias;
private Map<String, Object> sourceAsMap;
private Map<String, SearchHits> innerHits;
// used only in tests
public SearchHit(int docId) {
this(docId, null, null, null);
}
public SearchHit(int docId, String id, Map<String, DocumentField> documentFields, Map<String, DocumentField> metaFields) {
this(docId, id, null, documentFields, metaFields);
}
public SearchHit(
int nestedTopDocId,
String id,
NestedIdentity nestedIdentity,
Map<String, DocumentField> documentFields,
Map<String, DocumentField> metaFields
) {
this.docId = nestedTopDocId;
if (id != null) {
this.id = new Text(id);
} else {
this.id = null;
}
this.nestedIdentity = nestedIdentity;
this.documentFields = documentFields == null ? emptyMap() : documentFields;
this.metaFields = metaFields == null ? emptyMap() : metaFields;
}
public SearchHit(StreamInput in) throws IOException {
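// the read order below must mirror the write order in writeTo(StreamOutput)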
docId = -1;
score = in.readFloat();
id = in.readOptionalText();
if (in.getVersion().before(Version.V_8_0_0)) {
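// pre-8.0 nodes wrote the now-removed "_type" field; read and discard it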
in.readOptionalText();
}
nestedIdentity = in.readOptionalWriteable(NestedIdentity::new);
version = in.readLong();
seqNo = in.readZLong();
primaryTerm = in.readVLong();
source = in.readBytesReference();
if (source.length() == 0) {
source = null;
}
if (in.readBoolean()) {
explanation = readExplanation(in);
}
if (in.getVersion().onOrAfter(Version.V_7_8_0)) {
documentFields = in.readMap(StreamInput::readString, DocumentField::new);
metaFields = in.readMap(StreamInput::readString, DocumentField::new);
} else {
Map<String, DocumentField> fields = readFields(in);
documentFields = new HashMap<>();
metaFields = new HashMap<>();
fields.forEach(
(fieldName, docField) -> (MapperService.isMetadataFieldStatic(fieldName) ? metaFields : documentFields).put(
fieldName,
docField
)
);
}
int size = in.readVInt();
if (size == 0) {
highlightFields = emptyMap();
} else if (size == 1) {
HighlightField field = new HighlightField(in);
highlightFields = singletonMap(field.name(), field);
} else {
Map<String, HighlightField> highlightFields = new HashMap<>();
for (int i = 0; i < size; i++) {
HighlightField field = new HighlightField(in);
highlightFields.put(field.name(), field);
}
this.highlightFields = unmodifiableMap(highlightFields);
}
sortValues = new SearchSortValues(in);
size = in.readVInt();
if (size > 0) {
matchedQueries = new String[size];
for (int i = 0; i < size; i++) {
matchedQueries[i] = in.readString();
}
}
// we call the setter here because that also sets the local index parameter
shard(in.readOptionalWriteable(SearchShardTarget::new));
size = in.readVInt();
if (size > 0) {
innerHits = Maps.newMapWithExpectedSize(size);
for (int i = 0; i < size; i++) {
String key = in.readString();
SearchHits value = new SearchHits(in);
innerHits.put(key, value);
}
} else {
innerHits = null;
}
}
private static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME);
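// Pre-7.8 wire format stored document and metadata fields in a single map; readFields/writeFields handle that legacy layout.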
private static Map<String, DocumentField> readFields(StreamInput in) throws IOException {
Map<String, DocumentField> fields;
int size = in.readVInt();
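// specialize the common small sizes: share the immutable empty map, use a singleton map for a single field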
if (size == 0) {
fields = emptyMap();
} else if (size == 1) {
DocumentField hitField = new DocumentField(in);
fields = singletonMap(hitField.getName(), hitField);
} else {
fields = Maps.newMapWithExpectedSize(size);
for (int i = 0; i < size; i++) {
DocumentField field = new DocumentField(in);
fields.put(field.getName(), field);
}
fields = unmodifiableMap(fields);
}
return fields;
}
private static void writeFields(StreamOutput out, Map<String, DocumentField> fields) throws IOException {
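// writeCollection prefixes the values with their vint count, matching the vint size read back in readFields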
if (fields == null) {
out.writeVInt(0);
} else {
out.writeCollection(fields.values());
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
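// the write order below must mirror the read order in the StreamInput constructor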
out.writeFloat(score);
out.writeOptionalText(id);
if (out.getVersion().before(Version.V_8_0_0)) {
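// older nodes still expect the removed "_type" field, so emit the single mapping type for compatibility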
out.writeOptionalText(SINGLE_MAPPING_TYPE);
}
out.writeOptionalWriteable(nestedIdentity);
out.writeLong(version);
out.writeZLong(seqNo);
out.writeVLong(primaryTerm);
out.writeBytesReference(source);
if (explanation == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
writeExplanation(out, explanation);
}
if (out.getVersion().onOrAfter(Version.V_7_8_0)) {
out.writeMap(documentFields, StreamOutput::writeString, (stream, documentField) -> documentField.writeTo(stream));
out.writeMap(metaFields, StreamOutput::writeString, (stream, documentField) -> documentField.writeTo(stream));
} else {
writeFields(out, this.getFields());
}
if (highlightFields == null) {
out.writeVInt(0);
} else {
out.writeCollection(highlightFields.values());
}
sortValues.writeTo(out);
if (matchedQueries.length == 0) {
out.writeVInt(0);
} else {
out.writeStringArray(matchedQueries);
}
out.writeOptionalWriteable(shard);
if (innerHits == null) {
out.writeVInt(0);
} else {
out.writeMap(innerHits, StreamOutput::writeString, (o, v) -> v.writeTo(o));
}
}
public int docId() {
return this.docId;
}
public void score(float score) {
this.score = score;
}
/**
* The score.
*/
public float getScore() {
return this.score;
}
public void version(long version) {
this.version = version;
}
/**
* The version of the hit.
*/
public long getVersion() {
return this.version;
}
public void setSeqNo(long seqNo) {
this.seqNo = seqNo;
}
public void setPrimaryTerm(long primaryTerm) {
this.primaryTerm = primaryTerm;
}
/**
 * Returns the sequence number of the last modification to the document, or {@link SequenceNumbers#UNASSIGNED_SEQ_NO}
 * if not requested.
 */
public long getSeqNo() {
return this.seqNo;
}
/**
 * Returns the primary term of the last modification to the document, or {@link SequenceNumbers#UNASSIGNED_PRIMARY_TERM}
 * if not requested.
 */
public long getPrimaryTerm() {
return this.primaryTerm;
}
/**
* The index of the hit.
*/
public String getIndex() {
return this.index;
}
/**
* The id of the document.
*/
public String getId() {
return id != null ? id.string() : null;
}
/**
 * If this is a nested hit, returns the nested reference information; otherwise returns {@code null}.
 */
public NestedIdentity getNestedIdentity() {
return nestedIdentity;
}
/**
 * Returns the source as a bytes reference, uncompressing it first if needed.
 */
public BytesReference getSourceRef() {
if (this.source == null) {
return null;
}
try {
this.source = CompressorFactory.uncompressIfNeeded(this.source);
return this.source;
} catch (IOException e) {
throw new ElasticsearchParseException("failed to decompress source", e);
}
}
/**
 * Sets the source representation; it may be compressed.
 */
public SearchHit sourceRef(BytesReference source) {
this.source = source;
this.sourceAsMap = null;
return this;
}
/**
 * Whether the source is available. A source with no fields will return {@code true}. This will return {@code false} if
 * {@code fields} doesn't contain {@code _source} or if the source is disabled in the mapping.
 */
public boolean hasSource() {
return source != null;
}
/**
 * The source of the document as a string (can be {@code null}).
 */
public String getSourceAsString() {
if (source == null) {
return null;
}
try {
return XContentHelper.convertToJson(getSourceRef(), false);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to convert source to a json string");
}
}
/**
* The source of the document as a map (can be {@code null}).
*/
public Map<String, Object> getSourceAsMap() {
if (source == null) {
return null;
}
if (sourceAsMap != null) {
return sourceAsMap;
}
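// parse lazily and cache the result so repeated calls don't re-parse the source bytes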
sourceAsMap = SourceLookup.sourceAsMap(source);
return sourceAsMap;
}
@Override
public Iterator<DocumentField> iterator() {
// need to join the fields and metadata fields
Map<String, DocumentField> allFields = this.getFields();
return allFields.values().iterator();
}
/**
* The hit field matching the given field name.
*/
public DocumentField field(String fieldName) {
DocumentField result = documentFields.get(fieldName);
if (result != null) {
return result;
} else {
return metaFields.get(fieldName);
}
}
/**
 * Adds a new DocumentField to the map if neither parameter is null.
 */
public void setDocumentField(String fieldName, DocumentField field) {
if (fieldName == null || field == null) return;
if (documentFields.size() == 0) this.documentFields = new HashMap<>();
this.documentFields.put(fieldName, field);
}
/**
* @return a map of metadata fields for this hit
*/
public Map<String, DocumentField> getMetadataFields() {
return Collections.unmodifiableMap(metaFields);
}
/**
* @return a map of non-metadata fields requested for this hit
*/
public Map<String, DocumentField> getDocumentFields() {
return Collections.unmodifiableMap(documentFields);
}
/**
* A map of hit fields (from field name to hit fields) if additional fields
* were required to be loaded. Includes both document and metadata fields.
*/
public Map<String, DocumentField> getFields() {
if (metaFields.size() > 0 || documentFields.size() > 0) {
final Map<String, DocumentField> fields = new HashMap<>();
fields.putAll(metaFields);
fields.putAll(documentFields);
return fields;
} else {
return emptyMap();
}
}
/**
 * Whether this search hit has any lookup fields.
 */
public boolean hasLookupFields() {
return getDocumentFields().values().stream().anyMatch(doc -> doc.getLookupFields().isEmpty() == false);
}
/**
* Resolve the lookup fields with the given results and merge them as regular fetch fields.
*/
public void resolveLookupFields(Map<LookupField, List<Object>> lookupResults) {
if (lookupResults.isEmpty()) {
return;
}
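// take a snapshot of the field names so that replacing entries below cannot interfere with the iteration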
final List<String> fields = new ArrayList<>(documentFields.keySet());
for (String field : fields) {
documentFields.computeIfPresent(field, (k, docField) -> {
if (docField.getLookupFields().isEmpty()) {
return docField;
}
final List<Object>