/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.transport.RemoteClusterAware;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.lucene.Lucene.readExplanation;
import static org.elasticsearch.common.lucene.Lucene.writeExplanation;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
import static org.elasticsearch.common.xcontent.XContentParserUtils.parseFieldsValue;
import static org.elasticsearch.search.fetch.subphase.highlight.HighlightField.readHighlightField;
/**
* A single search hit.
*
* @see SearchHits
*/
public final class SearchHit implements Streamable, ToXContentObject, Iterable<DocumentField> {
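// Usage sketch (illustrative, not part of the original class): how client code typically consumes
// hits, assuming the high-level REST client; "my-index" and the field access below are hypothetical.
//
//   SearchResponse response = client.search(new SearchRequest("my-index"), RequestOptions.DEFAULT);
//   for (SearchHit hit : response.getHits()) {
//       String id = hit.getId();
//       Map<String, Object> source = hit.getSourceAsMap();
//   }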
private transient int docId;
private static final float DEFAULT_SCORE = Float.NEGATIVE_INFINITY;
private float score = DEFAULT_SCORE;
private Text id;
private Text type;
private NestedIdentity nestedIdentity;
private long version = -1;
private long seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
private long primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM;
private BytesReference source;
private Map<String, DocumentField> fields = emptyMap();
private Map<String, HighlightField> highlightFields = null;
private SearchSortValues sortValues = SearchSortValues.EMPTY;
private String[] matchedQueries = Strings.EMPTY_ARRAY;
private Explanation explanation;
@Nullable
private SearchShardTarget shard;
//These two fields normally get set when setting the shard target, so they hold the same values as the target and thus don't get
//serialized over the wire. When parsing hits back from xcontent though, in most cases (whenever explanations are disabled)
//we can't rebuild the shard target object, so we need to set these manually so that users can still retrieve them.
private transient String index;
private transient String clusterAlias;
private Map<String, Object> sourceAsMap;
private Map<String, SearchHits> innerHits;
SearchHit() {
}
public SearchHit(int docId) {
this(docId, null, null, null);
}
public SearchHit(int docId, String id, Text type, Map<String, DocumentField> fields) {
this(docId, id, type, null, fields);
}
public SearchHit(int nestedTopDocId, String id, Text type, NestedIdentity nestedIdentity, Map<String, DocumentField> fields) {
this.docId = nestedTopDocId;
if (id != null) {
this.id = new Text(id);
} else {
this.id = null;
}
this.type = type;
this.nestedIdentity = nestedIdentity;
this.fields = fields;
}
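// Illustrative sketch (an assumption, not from the original file): building a standalone hit, e.g.
// in a test; the doc id 0, type "_doc" and id "1" are arbitrary example values.
//
//   SearchHit hit = new SearchHit(0, "1", new Text("_doc"), Collections.emptyMap());
//   hit.score(1.0f);
//   hit.version(1);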
public int docId() {
return this.docId;
}
public void score(float score) {
this.score = score;
}
/**
* The score.
*/
public float getScore() {
return this.score;
}
public void version(long version) {
this.version = version;
}
/**
* The version of the hit.
*/
public long getVersion() {
return this.version;
}
public void setSeqNo(long seqNo) {
this.seqNo = seqNo;
}
public void setPrimaryTerm(long primaryTerm) {
this.primaryTerm = primaryTerm;
}
/**
* returns the sequence number of the last modification to the document, or {@link SequenceNumbers#UNASSIGNED_SEQ_NO}
* if not requested.
**/
public long getSeqNo() {
return this.seqNo;
}
/**
 * Returns the primary term of the last modification to the document, or {@link SequenceNumbers#UNASSIGNED_PRIMARY_TERM}
 * if not requested.
 */
public long getPrimaryTerm() {
return this.primaryTerm;
}
/**
* The index of the hit.
*/
public String getIndex() {
return this.index;
}
/**
* The id of the document.
*/
public String getId() {
return id != null ? id.string() : null;
}
/**
* The type of the document.
*/
public String getType() {
return type != null ? type.string() : null;
}
/**
* If this is a nested hit then nested reference information is returned otherwise null is returned.
*/
public NestedIdentity getNestedIdentity() {
return nestedIdentity;
}
/**
 * Returns the bytes reference, also uncompressing the source if needed.
 */
public BytesReference getSourceRef() {
if (this.source == null) {
return null;
}
try {
this.source = CompressorFactory.uncompressIfNeeded(this.source);
return this.source;
} catch (IOException e) {
throw new ElasticsearchParseException("failed to decompress source", e);
}
}
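// Minimal sketch of reading the raw source, assuming the hit was fetched with _source enabled:
//
//   BytesReference sourceBytes = hit.getSourceRef();   // uncompressed lazily on access
//   String sourceJson = sourceBytes != null ? hit.getSourceAsString() : null;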
/**
 * Sets the source representation, which might be compressed.
 */
public SearchHit sourceRef(BytesReference source) {
this.source = source;
this.sourceAsMap = null;
return this;
}
/**
* Is the source available or not. A source with no fields will return true. This will return false if {@code fields} doesn't contain
* {@code _source} or if source is disabled in the mapping.
*/
public boolean hasSource() {
return source != null;
}
/**
* The source of the document as string (can be {@code null}).
*/
public String getSourceAsString() {
if (source == null) {
return null;
}
try {
return XContentHelper.convertToJson(getSourceRef(), false);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to convert source to a json string");
}
}
/**
* The source of the document as a map (can be {@code null}).
*/
public Map<String, Object> getSourceAsMap() {
if (source == null) {
return null;
}
if (sourceAsMap != null) {
return sourceAsMap;
}
sourceAsMap = SourceLookup.sourceAsMap(source);
return sourceAsMap;
}
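// Sketch of map-based source access; "name" is a hypothetical top-level field. The parsed map is
// cached in sourceAsMap, so repeated calls do not re-parse the source bytes.
//
//   Map<String, Object> source = hit.getSourceAsMap();
//   Object name = source != null ? source.get("name") : null;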
@Override
public Iterator<DocumentField> iterator() {
return fields.values().iterator();
}
/**
* The hit field matching the given field name.
*/
public DocumentField field(String fieldName) {
return getFields().get(fieldName);
}
/**
* A map of hit fields (from field name to hit fields) if additional fields
* were required to be loaded.
*/
public Map<String, DocumentField> getFields() {
return fields == null ? emptyMap() : fields;
}
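// Sketch of reading a requested field (stored_fields or docvalue_fields); "timestamp" is a
// hypothetical field name, present only if it was asked for in the search request.
//
//   DocumentField timestamp = hit.field("timestamp");
//   Object firstValue = timestamp != null ? timestamp.getValue() : null;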
// returns the fields without handling null cases
public Map<String, DocumentField> fieldsOrNull() {
return fields;
}
public void fields(Map<String, DocumentField> fields) {
this.fields = fields;
}
/**
* A map of highlighted fields.
*/
public Map<String, HighlightField> getHighlightFields() {
return highlightFields == null ? emptyMap() : highlightFields;
}
public void highlightFields(Map<String, HighlightField> highlightFields) {
this.highlightFields = highlightFields;
}
public void sortValues(Object[] sortValues, DocValueFormat[] sortValueFormats) {
sortValues(new SearchSortValues(sortValues, sortValueFormats));
}
public void sortValues(SearchSortValues sortValues) {
this.sortValues = sortValues;
}
/**
* An array of the (formatted) sort values used.
*/
public Object[] getSortValues() {
return sortValues.getFormattedSortValues();
}
/**
* An array of the (raw) sort values used.
*/
public Object[] getRawSortValues() {
return sortValues.getRawSortValues();
}
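// Sketch of search_after style pagination: the formatted sort values of the last hit of a page are
// fed back into the next request (assumes the request was sorted; "sourceBuilder" is hypothetical).
//
//   SearchHit last = hits[hits.length - 1];
//   sourceBuilder.searchAfter(last.getSortValues());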
/**
* If enabled, the explanation of the search hit.
*/
public Explanation getExplanation() {
return explanation;
}
public void explanation(Explanation explanation) {
this.explanation = explanation;
}
/**
* The shard of the search hit.
*/
public SearchShardTarget getShard() {
return shard;
}
public void shard(SearchShardTarget target) {
if (innerHits != null) {
for (SearchHits innerHits : innerHits.values()) {
for (SearchHit innerHit : innerHits) {
innerHit.shard(target);
}
}
}
this.shard = target;
if (target != null) {
this.index = target.getIndex();
this.clusterAlias = target.getClusterAlias();
}
}
/**
* Returns the cluster alias this hit comes from or null if it comes from a local cluster
*/
public String getClusterAlias() {
return clusterAlias;
}
public void matchedQueries(String[] matchedQueries) {
this.matchedQueries = matchedQueries;
}
/**
* The set of query and filter names the query matched with. Mainly makes sense for compound filters and queries.
*/
public String[] getMatchedQueries() {
return this.matchedQueries;
}
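// Sketch: matched_queries only carries entries for queries that were given a _name in the request;
// "recent" is a hypothetical query name.
//
//   boolean matchedRecent = Arrays.asList(hit.getMatchedQueries()).contains("recent");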
/**
* @return Inner hits or null if there are none
*/
public Map<String, SearchHits> getInnerHits() {
return innerHits;
}
public void setInnerHits(Map<String, SearchHits> innerHits) {
this.innerHits = innerHits;
}
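// Sketch of walking inner hits (e.g. for a nested query); "comments" is a hypothetical inner_hits
// name defined in the search request, and getInnerHits() returns null when none were requested.
//
//   Map<String, SearchHits> inner = hit.getInnerHits();
//   if (inner != null && inner.containsKey("comments")) {
//       for (SearchHit comment : inner.get("comments")) {
//           // process each nested comment hit
//       }
//   }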
public static class Fields {
static final String _INDEX = "_index";
static final String _TYPE = "_type";
static final String _ID = "_id";
static final String _VERSION = "_version";
static final String _SEQ_NO = "_seq_no";
static final String _PRIMARY_TERM = "_primary_term";
static final String _SCORE = "_score";
static final String FIELDS = "fields";
static final String HIGHLIGHT = "highlight";
static final String SORT = "sort";
static final String MATCHED_QUERIES = "matched_queries";
static final String _EXPLANATION = "_explanation";
static final String VALUE = "value";
static final String DESCRIPTION = "description";
static final String DETAILS = "details";
static final String INNER_HITS = "inner_hits";
static final String _SHARD = "_shard";
static final String _NODE = "_node";
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
toInnerXContent(builder, params);
builder.endObject();
return builder;
}
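// Sketch of rendering a single hit back to JSON; Strings.toString(ToXContent) is used here on the
// assumption that it remains the usual helper for ToXContentObject implementations.
//
//   String hitAsJson = Strings.toString(hit);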
// public because we render hit as part of completion suggestion option
public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException {
List<DocumentField> metaFields = new ArrayList<>();
List<DocumentField> otherFields = new ArrayList<>();
if (fields != null && !fields.isEmpty()) {
for (DocumentField field : fields.values()) {
if (field.getValues().isEmpty()) {
continue;
}
if (field.isMetadataField()) {
metaFields.add(field);
} else {
otherFields.add(field);
}
}
}
// For inner_hit hits shard is null and that is ok, because the parent search hit has all this information.
// Even if this was included in the inner_hit hits this would be the same, so better leave it out.
if (getExplanation() != null && shard != null) {
builder.field(Fields._SHARD, shard.getShardId());
builder.field(Fields._NODE, shard.getNodeIdText());
}
if (index != null) {
builder.field(Fields._INDEX, RemoteClusterAware.buildRemoteIndexName(clusterAlias, index));
}
if (type != null) {
builder.field(Fields._TYPE, type);
}
if (id != null) {
builder.field(Fields._ID, id);
}
if (nestedIdentity != null) {
nestedIdentity.toXContent(builder, params);
}
if (version != -1) {
builder.field(Fields._VERSION, version);
}
if (seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) {
builder.field(Fields._SEQ_NO, seqNo);
builder.field(Fields._PRIMARY_TERM, primaryTerm);
}
if (Float.isNaN(score)) {
builder.nullField(Fields._SCORE);
} else {
builder.field(Fields._SCORE, score);
}
for (DocumentField field : metaFields) {
// _ignored is the only multi-valued meta field
// TODO: can we avoid having an exception here?
if (field.getName().equals(IgnoredFieldMapper.NAME)) {
builder.field(field.getName(), field.getValues());
} else {
builder.field(field.getName(), field.<Object>getValue());