
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.search;
import org.apache.lucene.search.Explanation;
import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchParseException;
import org.opensearch.action.OriginalIndices;
import org.opensearch.common.Nullable;
import org.opensearch.common.ParseField;
import org.opensearch.common.ParsingException;
import org.opensearch.common.Strings;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.compress.CompressorFactory;
import org.opensearch.common.document.DocumentField;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.common.io.stream.Writeable;
import org.opensearch.common.text.Text;
import org.opensearch.common.xcontent.ConstructingObjectParser;
import org.opensearch.common.xcontent.ObjectParser;
import org.opensearch.common.xcontent.ObjectParser.ValueType;
import org.opensearch.common.xcontent.ToXContentFragment;
import org.opensearch.common.xcontent.ToXContentObject;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentParser.Token;
import org.opensearch.index.mapper.IgnoredFieldMapper;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.SourceFieldMapper;
import org.opensearch.index.seqno.SequenceNumbers;
import org.opensearch.index.shard.ShardId;
import org.opensearch.search.fetch.subphase.highlight.HighlightField;
import org.opensearch.search.lookup.SourceLookup;
import org.opensearch.transport.RemoteClusterAware;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.Collections.unmodifiableMap;
import static org.opensearch.common.lucene.Lucene.readExplanation;
import static org.opensearch.common.lucene.Lucene.writeExplanation;
import static org.opensearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.opensearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.opensearch.common.xcontent.XContentParserUtils.ensureFieldName;
/**
* A single search hit.
*
* @see SearchHits
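*
* <p>A minimal usage sketch (caller-side names are illustrative, not part of this class):
* <pre>{@code
* SearchHit hit = response.getHits().getAt(0);
* String index = hit.getIndex();
* Map<String, Object> source = hit.getSourceAsMap(); // null when _source was not fetched
* float score = hit.getScore();                      // NaN when scores were not computed
* }</pre>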
*/
public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
private final transient int docId;
private static final float DEFAULT_SCORE = Float.NaN;
private float score = DEFAULT_SCORE;
private final Text id;
private final Text type;
private final NestedIdentity nestedIdentity;
private long version = -1;
private long seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
private long primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM;
private BytesReference source;
private Map<String, DocumentField> documentFields;
private final Map<String, DocumentField> metaFields;
private Map<String, HighlightField> highlightFields = null;
private SearchSortValues sortValues = SearchSortValues.EMPTY;
private String[] matchedQueries = Strings.EMPTY_ARRAY;
private Explanation explanation;
@Nullable
private SearchShardTarget shard;
// These two fields normally get set when setting the shard target, so they hold the same values as the target and
// thus don't get serialized over the wire. When parsing hits back from xcontent though, in most cases (whenever
// explanation is disabled) we can't rebuild the shard target object, so we need to set these manually for user retrieval.
private transient String index;
private transient String clusterAlias;
private Map<String, Object> sourceAsMap;
private Map<String, SearchHits> innerHits;
// used only in tests
public SearchHit(int docId) {
this(docId, null, null, null, null);
}
public SearchHit(int docId, String id, Text type, Map<String, DocumentField> documentFields, Map<String, DocumentField> metaFields) {
this(docId, id, type, null, documentFields, metaFields);
}
public SearchHit(
int nestedTopDocId,
String id,
Text type,
NestedIdentity nestedIdentity,
Map<String, DocumentField> documentFields,
Map<String, DocumentField> metaFields
) {
this.docId = nestedTopDocId;
if (id != null) {
this.id = new Text(id);
} else {
this.id = null;
}
this.type = type;
this.nestedIdentity = nestedIdentity;
this.documentFields = documentFields == null ? emptyMap() : documentFields;
this.metaFields = metaFields == null ? emptyMap() : metaFields;
}
public SearchHit(StreamInput in) throws IOException {
docId = -1;
score = in.readFloat();
id = in.readOptionalText();
type = in.readOptionalText();
nestedIdentity = in.readOptionalWriteable(NestedIdentity::new);
version = in.readLong();
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_7_0)) {
seqNo = in.readZLong();
primaryTerm = in.readVLong();
}
source = in.readBytesReference();
if (source.length() == 0) {
source = null;
}
if (in.readBoolean()) {
explanation = readExplanation(in);
}
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) {
documentFields = in.readMap(StreamInput::readString, DocumentField::new);
metaFields = in.readMap(StreamInput::readString, DocumentField::new);
} else {
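// Legacy wire format (pre-7.8): document and metadata fields arrive as a single map,
// so split them here using the pre-7.8 set of metadata field names.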
Map<String, DocumentField> fields = readFields(in);
documentFields = new HashMap<>();
metaFields = new HashMap<>();
fields.forEach(
(fieldName, docField) -> (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(
fieldName,
docField
)
);
}
int size = in.readVInt();
if (size == 0) {
highlightFields = emptyMap();
} else if (size == 1) {
HighlightField field = new HighlightField(in);
highlightFields = singletonMap(field.name(), field);
} else {
Map<String, HighlightField> highlightFields = new HashMap<>();
for (int i = 0; i < size; i++) {
HighlightField field = new HighlightField(in);
highlightFields.put(field.name(), field);
}
this.highlightFields = unmodifiableMap(highlightFields);
}
sortValues = new SearchSortValues(in);
size = in.readVInt();
if (size > 0) {
matchedQueries = new String[size];
for (int i = 0; i < size; i++) {
matchedQueries[i] = in.readString();
}
}
// we call the setter here because that also sets the local index parameter
shard(in.readOptionalWriteable(SearchShardTarget::new));
size = in.readVInt();
if (size > 0) {
innerHits = new HashMap<>(size);
for (int i = 0; i < size; i++) {
String key = in.readString();
SearchHits value = new SearchHits(in);
innerHits.put(key, value);
}
} else {
innerHits = null;
}
}
private Map<String, DocumentField> readFields(StreamInput in) throws IOException {
Map<String, DocumentField> fields;
int size = in.readVInt();
if (size == 0) {
fields = emptyMap();
} else if (size == 1) {
DocumentField hitField = new DocumentField(in);
fields = singletonMap(hitField.getName(), hitField);
} else {
fields = new HashMap<>(size);
for (int i = 0; i < size; i++) {
DocumentField field = new DocumentField(in);
fields.put(field.getName(), field);
}
fields = unmodifiableMap(fields);
}
return fields;
}
private void writeFields(StreamOutput out, Map<String, DocumentField> fields) throws IOException {
if (fields == null) {
out.writeVInt(0);
} else {
out.writeVInt(fields.size());
for (DocumentField field : fields.values()) {
field.writeTo(out);
}
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeFloat(score);
out.writeOptionalText(id);
out.writeOptionalText(type);
out.writeOptionalWriteable(nestedIdentity);
out.writeLong(version);
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_7_0)) {
out.writeZLong(seqNo);
out.writeVLong(primaryTerm);
}
out.writeBytesReference(source);
if (explanation == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
writeExplanation(out, explanation);
}
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) {
out.writeMap(documentFields, StreamOutput::writeString, (stream, documentField) -> documentField.writeTo(stream));
out.writeMap(metaFields, StreamOutput::writeString, (stream, documentField) -> documentField.writeTo(stream));
} else {
writeFields(out, this.getFields());
}
if (highlightFields == null) {
out.writeVInt(0);
} else {
out.writeVInt(highlightFields.size());
for (HighlightField highlightField : highlightFields.values()) {
highlightField.writeTo(out);
}
}
sortValues.writeTo(out);
if (matchedQueries.length == 0) {
out.writeVInt(0);
} else {
out.writeVInt(matchedQueries.length);
for (String matchedFilter : matchedQueries) {
out.writeString(matchedFilter);
}
}
out.writeOptionalWriteable(shard);
if (innerHits == null) {
out.writeVInt(0);
} else {
out.writeVInt(innerHits.size());
for (Map.Entry<String, SearchHits> entry : innerHits.entrySet()) {
out.writeString(entry.getKey());
entry.getValue().writeTo(out);
}
}
}
public int docId() {
return this.docId;
}
public void score(float score) {
this.score = score;
}
/**
* The score.
*/
public float getScore() {
return this.score;
}
public void version(long version) {
this.version = version;
}
/**
* The version of the hit.
*/
public long getVersion() {
return this.version;
}
public void setSeqNo(long seqNo) {
this.seqNo = seqNo;
}
public void setPrimaryTerm(long primaryTerm) {
this.primaryTerm = primaryTerm;
}
/**
* Returns the sequence number of the last modification to the document, or {@link SequenceNumbers#UNASSIGNED_SEQ_NO}
* if not requested.
*/
public long getSeqNo() {
return this.seqNo;
}
/**
* Returns the primary term of the last modification to the document, or {@link SequenceNumbers#UNASSIGNED_PRIMARY_TERM}
* if not requested.
*/
public long getPrimaryTerm() {
return this.primaryTerm;
}
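// Example (hypothetical caller code, not part of this class): together, seqNo and primaryTerm
// enable optimistic concurrency control on a follow-up write, e.g.
//   new IndexRequest(hit.getIndex()).id(hit.getId())
//       .setIfSeqNo(hit.getSeqNo())
//       .setIfPrimaryTerm(hit.getPrimaryTerm())
//       .source(updatedSource);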
/**
* The index of the hit.
*/
public String getIndex() {
return this.index;
}
/**
* The id of the document.
*/
public String getId() {
return id != null ? id.string() : null;
}
/**
* The type of the document.
*
* @deprecated Types are in the process of being removed. Instead of using a type, prefer to
* filter on a field on the document.
*/
@Deprecated
public String getType() {
return type != null ? type.string() : null;
}
/**
* If this is a nested hit, the nested reference information is returned; otherwise {@code null} is returned.
*/
public NestedIdentity getNestedIdentity() {
return nestedIdentity;
}
/**
* Returns the bytes reference of the source, uncompressing it first if needed.
*/
public BytesReference getSourceRef() {
if (this.source == null) {
return null;
}
try {
this.source = CompressorFactory.uncompressIfNeeded(this.source);
return this.source;
} catch (IOException e) {
throw new OpenSearchParseException("failed to decompress source", e);
}
}
/**
* Sets the source representation, which might be compressed.
*/
public SearchHit sourceRef(BytesReference source) {
this.source = source;
this.sourceAsMap = null;
return this;
}
/**
* Whether the source is available. A source with no fields will return {@code true}. This will return {@code false}
* if {@code fields} doesn't contain {@code _source} or if the source is disabled in the mapping.
*/
public boolean hasSource() {
return source != null;
}
/**
* The source of the document as string (can be {@code null}).
*/
public String getSourceAsString() {
if (source == null) {
return null;
}
try {
return XContentHelper.convertToJson(getSourceRef(), false);
} catch (IOException e) {
throw new OpenSearchParseException("failed to convert source to a json string");
}
}
/**
* The source of the document as a map (can be {@code null}).
*/
public Map<String, Object> getSourceAsMap() {
if (source == null) {
return null;
}
if (sourceAsMap != null) {
return sourceAsMap;
}
sourceAsMap = SourceLookup.sourceAsMap(source);
return sourceAsMap;
}
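// Example (hypothetical document): nested JSON in _source parses into nested maps and lists, e.g.
//   {"user": {"name": "kim"}} -> ((Map<String, Object>) hit.getSourceAsMap().get("user")).get("name")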
@Override
public Iterator<DocumentField> iterator() {
// need to join the fields and metadata fields
Map<String, DocumentField> allFields = this.getFields();
return allFields.values().iterator();
}
/**
* The hit field matching the given field name.
*/
public DocumentField field(String fieldName) {
DocumentField result = documentFields.get(fieldName);
if (result != null) {
return result;
} else {
return metaFields.get(fieldName);
}
}
/**
* Adds a new DocumentField to the map if both parameters are non-null.
*/
public void setDocumentField(String fieldName, DocumentField field) {
if (fieldName == null || field == null) return;
if (documentFields.size() == 0) this.documentFields = new HashMap<>();
this.documentFields.put(fieldName, field);
}
/**
* A map of hit fields (from field name to hit fields) if additional fields
* were required to be loaded.
*/
public Map<String, DocumentField> getFields() {
if (metaFields.size() > 0 || documentFields.size() > 0) {
final Map<String, DocumentField> fields = new HashMap<>();
fields.putAll(metaFields);
fields.putAll(documentFields);
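// when a field name exists in both maps, the document field wins because it is added last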
return fields;
} else {
return emptyMap();
}
}
/**
* A map of highlighted fields.
*/
public Map<String, HighlightField> getHighlightFields() {
return highlightFields == null ? emptyMap() : highlightFields;
}
public void highlightFields(Map<String, HighlightField> highlightFields) {
this.highlightFields = highlightFields;
}
public void sortValues(Object[] sortValues, DocValueFormat[] sortValueFormats) {
sortValues(new SearchSortValues(sortValues, sortValueFormats));
}
public void sortValues(SearchSortValues sortValues) {
this.sortValues = sortValues;
}
/**
* An array of the (formatted) sort values used.
*/
public Object[] getSortValues() {
return sortValues.getFormattedSortValues();
}
/**
* An array of the (raw) sort values used.
*/
public Object[] getRawSortValues() {
return sortValues.getRawSortValues();
}
/**
* If enabled, the explanation of the search hit.
*/
public Explanation getExplanation() {
return explanation;
}
public void explanation(Explanation explanation) {
this.explanation = explanation;
}
/**
* The shard of the search hit.
*/
public SearchShardTarget getShard() {
return shard;
}
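// Setting the shard target also propagates it recursively to all inner hits, and derives the transient
// index / clusterAlias fields from the target (see the comment on those fields above).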
public void shard(SearchShardTarget target) {
if (innerHits != null) {
for (SearchHits innerHits : innerHits.values()) {
for (SearchHit innerHit : innerHits) {
innerHit.shard(target);
}
}
}
this.shard = target;
if (target != null) {
this.index = target.getIndex();
this.clusterAlias = target.getClusterAlias();
}
}
/**
* Returns the cluster alias this hit comes from, or {@code null} if it comes from a local cluster.
*/
public String getClusterAlias() {
return clusterAlias;
}
public void matchedQueries(String[] matchedQueries) {
this.matchedQueries = matchedQueries;
}
/**
* The set of query and filter names the query matched with. Mainly makes sense for compound filters and queries.
*/
public String[] getMatchedQueries() {
return this.matchedQueries;
}
/**
* @return Inner hits, or {@code null} if there are none
*/
public Map<String, SearchHits> getInnerHits() {
return innerHits;
}
public void setInnerHits(Map<String, SearchHits> innerHits) {
this.innerHits = innerHits;
}
public static class Fields {
static final String _INDEX = "_index";
static final String _TYPE = "_type";
static final String _ID = "_id";
static final String _VERSION = "_version";
static final String _SEQ_NO = "_seq_no";
static final String _PRIMARY_TERM = "_primary_term";
static final String _SCORE = "_score";
static final String FIELDS = "fields";
static final String HIGHLIGHT = "highlight";
static final String SORT = "sort";
static final String MATCHED_QUERIES = "matched_queries";
static final String _EXPLANATION = "_explanation";
static final String VALUE = "value";
static final String DESCRIPTION = "description";
static final String DETAILS = "details";
static final String INNER_HITS = "inner_hits";
static final String _SHARD = "_shard";
static final String _NODE = "_node";
}
// Following are the keys for storing the metadata fields and regular fields in the aggregation map.
// These do not influence the structure of json serialization: document fields are still stored
// under FIELDS and metadata are still scattered at the root level.
static final String DOCUMENT_FIELDS = "document_fields";
static final String METADATA_FIELDS = "metadata_fields";
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
toInnerXContent(builder, params);
builder.endObject();
return builder;
}
// public because we render hit as part of completion suggestion option
public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException {
// For inner_hit hits shard is null and that is ok, because the parent search hit has all this information.
// Even if this was included in the inner_hit hits this would be the same, so better leave it out.
if (getExplanation() != null && shard != null) {
builder.field(Fields._SHARD, shard.getShardId());
builder.field(Fields._NODE, shard.getNodeIdText());
}
if (index != null) {
builder.field(Fields._INDEX, RemoteClusterAware.buildRemoteIndexName(clusterAlias, index));
}
if (type != null) {
builder.field(Fields._TYPE, type);
}
if (id != null) {
builder.field(Fields._ID, id);
}
if (nestedIdentity != null) {
nestedIdentity.toXContent(builder, params);
}
if (version != -1) {
builder.field(Fields._VERSION, version);
}
if (seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) {
builder.field(Fields._SEQ_NO, seqNo);
builder.field(Fields._PRIMARY_TERM, primaryTerm);
}
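// an unset score is represented as NaN and rendered as JSON null rather than as a number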
if (Float.isNaN(score)) {
builder.nullField(Fields._SCORE);
} else {
builder.field(Fields._SCORE, score);
}
for (DocumentField field : metaFields.values()) {
// ignore empty metadata fields
if (field.getValues().size() == 0) {
continue;
}
// _ignored is the only multi-valued meta field
// TODO: can we avoid having an exception here?
if (field.getName().equals(IgnoredFieldMapper.NAME)) {
builder.field(field.getName(), field.getValues());
} else {
builder.field(field.getName(), field.<Object>getValue());
}
}