
// org.elasticsearch.index.reindex.BulkByScrollResponse (Maven / Gradle / Ivy artifact source)
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.reindex;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.reindex.BulkByScrollTask.Status;
import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xcontent.ObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParser.Token;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.util.Objects.requireNonNull;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.core.TimeValue.timeValueNanos;
/**
* Response used for actions that index many documents using a scroll request.
*/
/**
 * Response used for actions that index many documents using a scroll request
 * (reindex, update-by-query, delete-by-query). Carries the aggregate task
 * {@link BulkByScrollTask.Status}, the time taken, and any bulk or search
 * failures accumulated while the request ran.
 */
public class BulkByScrollResponse extends ActionResponse implements ToXContentFragment {
    private TimeValue took;
    private BulkByScrollTask.Status status;
    private List<Failure> bulkFailures;
    private List<ScrollableHitSource.SearchFailure> searchFailures;
    private boolean timedOut;

    private static final String TOOK_FIELD = "took";
    private static final String TIMED_OUT_FIELD = "timed_out";
    private static final String FAILURES_FIELD = "failures";

    private static final ObjectParser<BulkByScrollResponseBuilder, Void> PARSER = new ObjectParser<>(
        "bulk_by_scroll_response",
        true,
        BulkByScrollResponseBuilder::new
    );
    static {
        PARSER.declareLong(BulkByScrollResponseBuilder::setTook, new ParseField(TOOK_FIELD));
        PARSER.declareBoolean(BulkByScrollResponseBuilder::setTimedOut, new ParseField(TIMED_OUT_FIELD));
        PARSER.declareObjectArray(BulkByScrollResponseBuilder::setFailures, (p, c) -> parseFailure(p), new ParseField(FAILURES_FIELD));
        // The fields of BulkByScrollTask.Status are mixed into the top level of the
        // response body (not nested), so the Status fields are declared on this parser too.
        Status.declareFields(PARSER);
    }

    /**
     * Read the response from the wire.
     *
     * @param in stream positioned at the serialized form written by {@link #writeTo}
     * @throws IOException if the stream cannot be read
     */
    public BulkByScrollResponse(StreamInput in) throws IOException {
        super(in);
        took = in.readTimeValue();
        status = new BulkByScrollTask.Status(in);
        bulkFailures = in.readList(Failure::new);
        searchFailures = in.readList(ScrollableHitSource.SearchFailure::new);
        timedOut = in.readBoolean();
    }

    /**
     * Build a response from already-computed parts.
     *
     * @param took           total time the request took
     * @param status         task status; must not be {@code null}
     * @param bulkFailures   failures from the bulk (indexing) side
     * @param searchFailures failures from the search (scroll) side
     * @param timedOut       whether any sub-request timed out
     */
    public BulkByScrollResponse(
        TimeValue took,
        BulkByScrollTask.Status status,
        List<Failure> bulkFailures,
        List<ScrollableHitSource.SearchFailure> searchFailures,
        boolean timedOut
    ) {
        this.took = took;
        this.status = requireNonNull(status, "Null status not supported");
        this.bulkFailures = bulkFailures;
        this.searchFailures = searchFailures;
        this.timedOut = timedOut;
    }

    /**
     * Merge the responses of all slices of a sliced request into a single response.
     *
     * @param toMerge         responses of the individual slices
     * @param reasonCancelled the cancellation reason, or {@code null} if the request was not cancelled
     */
    public BulkByScrollResponse(Iterable<BulkByScrollResponse> toMerge, @Nullable String reasonCancelled) {
        long mergedTook = 0;
        List<BulkByScrollTask.StatusOrException> statuses = new ArrayList<>();
        bulkFailures = new ArrayList<>();
        searchFailures = new ArrayList<>();
        for (BulkByScrollResponse response : toMerge) {
            // Slices run in parallel, so the merged took is the maximum, not the sum.
            mergedTook = max(mergedTook, response.getTook().nanos());
            statuses.add(new BulkByScrollTask.StatusOrException(response.status));
            bulkFailures.addAll(response.getBulkFailures());
            searchFailures.addAll(response.getSearchFailures());
            timedOut |= response.isTimedOut();
        }
        took = timeValueNanos(mergedTook);
        status = new BulkByScrollTask.Status(statuses, reasonCancelled);
    }

    public TimeValue getTook() {
        return took;
    }

    public BulkByScrollTask.Status getStatus() {
        return status;
    }

    public long getCreated() {
        return status.getCreated();
    }

    public long getTotal() {
        return status.getTotal();
    }

    public long getDeleted() {
        return status.getDeleted();
    }

    public long getUpdated() {
        return status.getUpdated();
    }

    public int getBatches() {
        return status.getBatches();
    }

    public long getVersionConflicts() {
        return status.getVersionConflicts();
    }

    public long getNoops() {
        return status.getNoops();
    }

    /**
     * The reason that the request was canceled or null if it hasn't been.
     */
    public String getReasonCancelled() {
        return status.getReasonCancelled();
    }

    /**
     * The number of times that the request had retry bulk actions.
     */
    public long getBulkRetries() {
        return status.getBulkRetries();
    }

    /**
     * The number of times that the request had retry search actions.
     */
    public long getSearchRetries() {
        return status.getSearchRetries();
    }

    /**
     * All of the bulk failures. Version conflicts are only included if the request sets abortOnVersionConflict to true (the default).
     */
    public List<Failure> getBulkFailures() {
        return bulkFailures;
    }

    /**
     * All search failures.
     */
    public List<ScrollableHitSource.SearchFailure> getSearchFailures() {
        return searchFailures;
    }

    /**
     * Did any of the sub-requests that were part of this request timeout?
     */
    public boolean isTimedOut() {
        return timedOut;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeTimeValue(took);
        status.writeTo(out);
        out.writeList(bulkFailures);
        out.writeList(searchFailures);
        out.writeBoolean(timedOut);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.field(TOOK_FIELD, took.millis());
        builder.field(TIMED_OUT_FIELD, timedOut);
        // Status fields are emitted at the top level, mirroring how they are parsed.
        status.innerXContent(builder, params);
        builder.startArray(FAILURES_FIELD);
        // Bulk failures serialize as bare fields, so we wrap each in its own object;
        // search failures emit their own enclosing object.
        for (Failure failure : bulkFailures) {
            builder.startObject();
            failure.toXContent(builder, params);
            builder.endObject();
        }
        for (ScrollableHitSource.SearchFailure failure : searchFailures) {
            failure.toXContent(builder, params);
        }
        builder.endArray();
        return builder;
    }

    /**
     * Parse a response body produced by {@link #toXContent}.
     */
    public static BulkByScrollResponse fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null).buildResponse();
    }

    /**
     * Parse one entry of the "failures" array. The array holds a mix of bulk
     * failures and search failures; the two are distinguished by which exception
     * field is present ("cause" for bulk, "reason" for search).
     *
     * @return either a {@link Failure} or a {@link SearchFailure}
     * @throws ElasticsearchParseException if neither exception field is present
     */
    private static Object parseFailure(XContentParser parser) throws IOException {
        ensureExpectedToken(Token.START_OBJECT, parser.currentToken(), parser);
        Token token;
        String index = null;
        String id = null;
        Integer status = null;
        Integer shardId = null;
        String nodeId = null;
        ElasticsearchException bulkExc = null;
        ElasticsearchException searchExc = null;
        while ((token = parser.nextToken()) != Token.END_OBJECT) {
            ensureExpectedToken(Token.FIELD_NAME, token, parser);
            String name = parser.currentName();
            token = parser.nextToken();
            if (token == Token.START_ARRAY) {
                // Unknown nested arrays are ignored for forward compatibility.
                parser.skipChildren();
            } else if (token == Token.START_OBJECT) {
                switch (name) {
                    case SearchFailure.REASON_FIELD -> searchExc = ElasticsearchException.fromXContent(parser);
                    case Failure.CAUSE_FIELD -> bulkExc = ElasticsearchException.fromXContent(parser);
                    default -> parser.skipChildren();
                }
            } else if (token == Token.VALUE_STRING) {
                switch (name) {
                    // This field is the same as SearchFailure.index
                    case Failure.INDEX_FIELD -> index = parser.text();
                    case Failure.ID_FIELD -> id = parser.text();
                    case SearchFailure.NODE_FIELD -> nodeId = parser.text();
                }
            } else if (token == Token.VALUE_NUMBER) {
                switch (name) {
                    case Failure.STATUS_FIELD -> status = parser.intValue();
                    case SearchFailure.SHARD_FIELD -> shardId = parser.intValue();
                }
            }
        }
        if (bulkExc != null) {
            return new Failure(index, id, bulkExc, RestStatus.fromCode(status));
        } else if (searchExc != null) {
            if (status == null) {
                return new SearchFailure(searchExc, index, shardId, nodeId);
            } else {
                return new SearchFailure(searchExc, index, shardId, nodeId, RestStatus.fromCode(status));
            }
        } else {
            throw new ElasticsearchParseException("failed to parse failures array. At least one of {reason,cause} must be present");
        }
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append(getClass().getSimpleName()).append("[");
        builder.append("took=").append(took).append(',');
        builder.append("timed_out=").append(timedOut).append(',');
        status.innerToString(builder);
        // Cap the failure lists at three entries each to keep the string bounded.
        builder.append(",bulk_failures=").append(getBulkFailures().subList(0, min(3, getBulkFailures().size())));
        builder.append(",search_failures=").append(getSearchFailures().subList(0, min(3, getSearchFailures().size())));
        return builder.append(']').toString();
    }
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy