/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under
* one or more contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright ownership.
* Licensed under the Camunda License 1.0. You may not use this file
* except in compliance with the Camunda License 1.0.
*/
package io.camunda.operate.util;
import static io.camunda.operate.util.CollectionUtil.map;
import static io.camunda.operate.util.CollectionUtil.throwAwayNullElements;
import static java.util.Arrays.asList;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.AUTO_SLICES_VALUE;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.camunda.operate.entities.HitEntity;
import io.camunda.operate.exceptions.ArchiverException;
import io.camunda.operate.exceptions.OperateRuntimeException;
import io.camunda.operate.exceptions.PersistenceException;
import io.camunda.operate.schema.templates.AbstractTemplateDescriptor;
import io.camunda.operate.schema.templates.EventTemplate;
import io.camunda.operate.schema.templates.IncidentTemplate;
import io.camunda.operate.schema.templates.TemplateDescriptor;
import java.io.IOException;
import java.time.Instant;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.*;
import org.elasticsearch.client.tasks.GetTaskRequest;
import org.elasticsearch.client.tasks.GetTaskResponse;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.tasks.RawTaskStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
public abstract class ElasticsearchUtil {
public static final int SCROLL_KEEP_ALIVE_MS = 60000;
public static final int TERMS_AGG_SIZE = 10000;
public static final int QUERY_MAX_SIZE = 10000;
public static final int TOPHITS_AGG_SIZE = 100;
public static final int UPDATE_RETRY_COUNT = 3;
public static final Function SEARCH_HIT_ID_TO_LONG =
(hit) -> Long.valueOf(hit.getId());
public static final Function SEARCH_HIT_ID_TO_STRING = SearchHit::getId;
public static RequestOptions requestOptions = RequestOptions.DEFAULT;
private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchUtil.class);
public static void setRequestOptions(final RequestOptions newRequestOptions) {
requestOptions = newRequestOptions;
}
public static CompletableFuture searchAsync(
final SearchRequest searchRequest,
final Executor executor,
final RestHighLevelClient esClient) {
final var searchFuture = new CompletableFuture();
esClient.searchAsync(
searchRequest,
RequestOptions.DEFAULT,
new DelegatingActionListener<>(searchFuture, executor));
return searchFuture;
}
public static CompletableFuture reindexAsyncWithConnectionRelease(
final ThreadPoolTaskScheduler executor,
final ReindexRequest reindexRequest,
final String sourceIndexName,
final RestHighLevelClient esClient) {
final CompletableFuture reindexFuture = new CompletableFuture<>();
try {
final String taskId =
esClient.submitReindexTask(reindexRequest, RequestOptions.DEFAULT).getTask();
LOGGER.debug("Reindexing started for index {}. Task id: {}", sourceIndexName, taskId);
reindexFuture.complete(taskId);
} catch (final IOException ex) {
reindexFuture.completeExceptionally(ex);
}
return reindexFuture.thenCompose(
(tId) -> checkTaskResult(executor, tId, sourceIndexName, "reindex", esClient));
}
public static CompletableFuture deleteAsyncWithConnectionRelease(
final ThreadPoolTaskScheduler executor,
final String sourceIndexName,
final String idFieldName,
final List idValues,
final ObjectMapper objectMapper,
final RestHighLevelClient esClient) {
final CompletableFuture deleteRequestFuture = new CompletableFuture<>();
try {
final String query = termsQuery(idFieldName, idValues).toString();
final Request deleteWithTaskRequest =
new Request(HttpPost.METHOD_NAME, String.format("/%s/_delete_by_query", sourceIndexName));
deleteWithTaskRequest.setJsonEntity(String.format("{\"query\": %s }", query));
deleteWithTaskRequest.addParameter("wait_for_completion", "false");
deleteWithTaskRequest.addParameter("slices", AUTO_SLICES_VALUE);
deleteWithTaskRequest.addParameter("conflicts", "proceed");
final Response response = esClient.getLowLevelClient().performRequest(deleteWithTaskRequest);
if (!(response.getStatusLine().getStatusCode() == HttpStatus.SC_OK)) {
final HttpEntity entity = response.getEntity();
final String errorMsg =
String.format(
"Exception occurred when performing deletion. Status code: %s, error: %s",
response.getStatusLine().getStatusCode(),
entity == null ? "" : EntityUtils.toString(entity));
deleteRequestFuture.completeExceptionally(new ArchiverException(errorMsg));
}
final Map bodyMap =
objectMapper.readValue(response.getEntity().getContent(), Map.class);
final String taskId = (String) bodyMap.get("task");
LOGGER.debug("Deletion started for index {}. Task id {}", sourceIndexName, taskId);
deleteRequestFuture.complete(taskId);
} catch (final IOException ex) {
deleteRequestFuture.completeExceptionally(ex);
}
return deleteRequestFuture.thenCompose(
(tId) -> checkTaskResult(executor, tId, sourceIndexName, "delete", esClient));
}
private static CompletableFuture checkTaskResult(
final ThreadPoolTaskScheduler executor,
final String taskId,
final String sourceIndexName,
final String operation,
final RestHighLevelClient esClient) {
final CompletableFuture checkTaskResult = new CompletableFuture<>();
final BackoffIdleStrategy idleStrategy = new BackoffIdleStrategy(1_000, 1.2f, 5_000);
final Runnable checkTaskResultRunnable =
new Runnable() {
@Override
public void run() {
try {
// extract nodeId and taskId
final String[] taskIdParts = taskId.split(":");
final GetTaskRequest getTaskRequest =
new GetTaskRequest(taskIdParts[0], Long.parseLong(taskIdParts[1]));
final Optional getTaskResponseOptional =
esClient.tasks().get(getTaskRequest, RequestOptions.DEFAULT);
final GetTaskResponse getTaskResponse =
getTaskResponseOptional.orElseThrow(
() -> new OperateRuntimeException("Task was not found: " + taskId));
if (getTaskResponse.isCompleted()) {
final RawTaskStatus status =
(RawTaskStatus) getTaskResponse.getTaskInfo().getStatus();
final long total = getTotalAffectedFromTask(sourceIndexName, operation, status);
checkTaskResult.complete(total);
} else {
idleStrategy.idle();
executor.schedule(
this, Date.from(Instant.now().plusMillis(idleStrategy.idleTime())));
}
} catch (final Exception e) {
checkTaskResult.completeExceptionally(e);
}
}
};
executor.submit(checkTaskResultRunnable);
return checkTaskResult;
}
private static long getTotalAffectedFromTask(
final String sourceIndexName, final String operation, final RawTaskStatus status) {
// parse and check task status
final Map statusMap = status.toMap();
final long total = (Integer) statusMap.get("total");
final long created = (Integer) statusMap.get("created");
final long updated = (Integer) statusMap.get("updated");
final long deleted = (Integer) statusMap.get("deleted");
if (created + updated + deleted < total) {
// there were some failures
final String errorMsg =
String.format(
"Failures occurred when performing operation %s on source index %s. Check Elasticsearch logs.",
operation, sourceIndexName);
throw new OperateRuntimeException(errorMsg);
}
LOGGER.debug("Operation {} succeeded on source index {}.", operation, sourceIndexName);
return total;
}
public static SearchRequest createSearchRequest(final TemplateDescriptor template) {
return createSearchRequest(template, QueryType.ALL);
}
/* CREATE QUERIES */
public static SearchRequest createSearchRequest(
final TemplateDescriptor template, final QueryType queryType) {
final SearchRequest searchRequest = new SearchRequest(whereToSearch(template, queryType));
return searchRequest;
}
private static String whereToSearch(
final TemplateDescriptor template, final QueryType queryType) {
switch (queryType) {
case ONLY_RUNTIME:
return template.getFullQualifiedName();
case ALL:
default:
return template.getAlias();
}
}
public static QueryBuilder joinWithOr(
final BoolQueryBuilder boolQueryBuilder, final QueryBuilder... queries) {
final List notNullQueries = throwAwayNullElements(queries);
for (final QueryBuilder query : notNullQueries) {
boolQueryBuilder.should(query);
}
return boolQueryBuilder;
}
/**
* Join queries with OR clause. If 0 queries are passed for wrapping, then null is returned. If 1
* parameter is passed, it will be returned back as ia. Otherwise, the new BoolQuery will be
* created and returned.
*
* @param queries
* @return
*/
public static QueryBuilder joinWithOr(final QueryBuilder... queries) {
final List notNullQueries = throwAwayNullElements(queries);
switch (notNullQueries.size()) {
case 0:
return null;
case 1:
return notNullQueries.get(0);
default:
final BoolQueryBuilder boolQ = boolQuery();
for (final QueryBuilder query : notNullQueries) {
boolQ.should(query);
}
return boolQ;
}
}
public static QueryBuilder joinWithOr(final Collection queries) {
return joinWithOr(queries.toArray(new QueryBuilder[queries.size()]));
}
/**
* Join queries with AND clause. If 0 queries are passed for wrapping, then null is returned. If 1
* parameter is passed, it will be returned back as ia. Otherwise, the new BoolQuery will be
* created and returned.
*
* @param queries
* @return
*/
public static QueryBuilder joinWithAnd(final QueryBuilder... queries) {
final List notNullQueries = throwAwayNullElements(queries);
switch (notNullQueries.size()) {
case 0:
return null;
case 1:
return notNullQueries.get(0);
default:
final BoolQueryBuilder boolQ = boolQuery();
for (final QueryBuilder query : notNullQueries) {
boolQ.must(query);
}
return boolQ;
}
}
public static BoolQueryBuilder createMatchNoneQuery() {
return boolQuery().must(QueryBuilders.wrapperQuery("{\"match_none\": {}}"));
}
public static void processBulkRequest(
final RestHighLevelClient esClient,
final BulkRequest bulkRequest,
final long maxBulkRequestSizeInBytes)
throws PersistenceException {
processBulkRequest(esClient, bulkRequest, false, maxBulkRequestSizeInBytes);
}
/* EXECUTE QUERY */
public static void processBulkRequest(
final RestHighLevelClient esClient,
final BulkRequest bulkRequest,
final boolean refreshImmediately,
final long maxBulkRequestSizeInBytes)
throws PersistenceException {
if (bulkRequest.estimatedSizeInBytes() > maxBulkRequestSizeInBytes) {
divideLargeBulkRequestAndProcess(
esClient, bulkRequest, refreshImmediately, maxBulkRequestSizeInBytes);
} else {
processLimitedBulkRequest(esClient, bulkRequest, refreshImmediately);
}
}
private static void divideLargeBulkRequestAndProcess(
final RestHighLevelClient esClient,
final BulkRequest bulkRequest,
final boolean refreshImmediately,
final long maxBulkRequestSizeInBytes)
throws PersistenceException {
LOGGER.debug(
"Bulk request has {} bytes > {} max bytes ({} requests). Will divide it into smaller bulk requests.",
bulkRequest.estimatedSizeInBytes(),
maxBulkRequestSizeInBytes,
bulkRequest.requests().size());
int requestCount = 0;
final List> requests = bulkRequest.requests();
BulkRequest limitedBulkRequest = new BulkRequest();
while (requestCount < requests.size()) {
final DocWriteRequest> nextRequest = requests.get(requestCount);
if (nextRequest.ramBytesUsed() > maxBulkRequestSizeInBytes) {
throw new PersistenceException(
String.format(
"One of the request with size of %d bytes is greater than max allowed %d bytes",
nextRequest.ramBytesUsed(), maxBulkRequestSizeInBytes));
}
final long wholeSize = limitedBulkRequest.estimatedSizeInBytes() + nextRequest.ramBytesUsed();
if (wholeSize < maxBulkRequestSizeInBytes) {
limitedBulkRequest.add(nextRequest);
} else {
LOGGER.debug(
"Submit bulk of {} requests, size {} bytes.",
limitedBulkRequest.requests().size(),
limitedBulkRequest.estimatedSizeInBytes());
processLimitedBulkRequest(esClient, limitedBulkRequest, refreshImmediately);
limitedBulkRequest = new BulkRequest();
limitedBulkRequest.add(nextRequest);
}
requestCount++;
}
if (!limitedBulkRequest.requests().isEmpty()) {
LOGGER.debug(
"Submit bulk of {} requests, size {} bytes.",
limitedBulkRequest.requests().size(),
limitedBulkRequest.estimatedSizeInBytes());
processLimitedBulkRequest(esClient, limitedBulkRequest, refreshImmediately);
}
}
@SuppressWarnings("checkstyle:NestedIfDepth")
private static void processLimitedBulkRequest(
final RestHighLevelClient esClient, BulkRequest bulkRequest, final boolean refreshImmediately)
throws PersistenceException {
if (bulkRequest.requests().size() > 0) {
try {
LOGGER.debug("************* FLUSH BULK START *************");
if (refreshImmediately) {
bulkRequest = bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
}
final BulkResponse bulkItemResponses = esClient.bulk(bulkRequest, RequestOptions.DEFAULT);
final BulkItemResponse[] items = bulkItemResponses.getItems();
for (int i = 0; i < items.length; i++) {
final BulkItemResponse responseItem = items[i];
if (responseItem.isFailed() && !isEventConflictError(responseItem)) {
if (isMissingIncident(responseItem)) {
// the case when incident was already archived to dated index, but must be updated
final DocWriteRequest> request = bulkRequest.requests().get(i);
final String incidentId = extractIncidentId(responseItem.getFailure().getMessage());
final String indexName =
getIndexNames(request.index() + "alias", asList(incidentId), esClient)
.get(incidentId);
request.index(indexName);
if (indexName == null) {
LOGGER.warn("Index is not known for incident: " + incidentId);
} else {
esClient.update((UpdateRequest) request, RequestOptions.DEFAULT);
}
} else {
LOGGER.error(
String.format(
"%s failed for type [%s] and id [%s]: %s",
responseItem.getOpType(),
responseItem.getIndex(),
responseItem.getId(),
responseItem.getFailureMessage()),
responseItem.getFailure().getCause());
throw new PersistenceException(
"Operation failed: " + responseItem.getFailureMessage(),
responseItem.getFailure().getCause(),
responseItem.getItemId());
}
}
}
LOGGER.debug("************* FLUSH BULK FINISH *************");
} catch (final IOException ex) {
throw new PersistenceException(
"Error when processing bulk request against Elasticsearch: " + ex.getMessage(), ex);
}
}
}
private static String extractIncidentId(final String errorMessage) {
final Pattern fniPattern = Pattern.compile(".*\\[_doc\\]\\[(\\d*)\\].*");
final Matcher matcher = fniPattern.matcher(errorMessage);
matcher.matches();
return matcher.group(1);
}
private static boolean isMissingIncident(final BulkItemResponse responseItem) {
return responseItem.getIndex().contains(IncidentTemplate.INDEX_NAME)
&& responseItem.getFailure().getStatus().equals(RestStatus.NOT_FOUND);
}
private static boolean isEventConflictError(final BulkItemResponse responseItem) {
return responseItem.getIndex().contains(EventTemplate.INDEX_NAME)
&& responseItem.getFailure().getStatus().equals(RestStatus.CONFLICT);
}
/* MAP QUERY RESULTS */
public static List mapSearchHits(
final List hits, final ObjectMapper objectMapper, final JavaType valueType) {
return map(hits, h -> fromSearchHit(h.getSourceAsString(), objectMapper, valueType));
}
public static List mapSearchHits(
final HitEntity[] searchHits, final ObjectMapper objectMapper, final Class clazz) {
return map(
searchHits,
(searchHit) -> fromSearchHit(searchHit.getSourceAsString(), objectMapper, clazz));
}
public static List mapSearchHits(
final SearchHit[] searchHits, final Function searchHitMapper) {
return map(searchHits, searchHitMapper);
}
public static List mapSearchHits(
final SearchHit[] searchHits, final ObjectMapper objectMapper, final Class clazz) {
return map(
searchHits,
(searchHit) -> fromSearchHit(searchHit.getSourceAsString(), objectMapper, clazz));
}
public static T fromSearchHit(
final String searchHitString, final ObjectMapper objectMapper, final Class clazz) {
final T entity;
try {
entity = objectMapper.readValue(searchHitString, clazz);
} catch (final IOException e) {
LOGGER.error(
String.format(
"Error while reading entity of type %s from Elasticsearch!", clazz.getName()),
e);
throw new OperateRuntimeException(
String.format(
"Error while reading entity of type %s from Elasticsearch!", clazz.getName()),
e);
}
return entity;
}
public static List mapSearchHits(
final SearchHit[] searchHits, final ObjectMapper objectMapper, final JavaType valueType) {
return map(
searchHits,
(searchHit) -> fromSearchHit(searchHit.getSourceAsString(), objectMapper, valueType));
}
public static T fromSearchHit(
final String searchHitString, final ObjectMapper objectMapper, final JavaType valueType) {
final T entity;
try {
entity = objectMapper.readValue(searchHitString, valueType);
} catch (final IOException e) {
LOGGER.error(
String.format(
"Error while reading entity of type %s from Elasticsearch!", valueType.toString()),
e);
throw new OperateRuntimeException(
String.format(
"Error while reading entity of type %s from Elasticsearch!", valueType.toString()),
e);
}
return entity;
}
public static List scroll(
final SearchRequest searchRequest,
final Class clazz,
final ObjectMapper objectMapper,
final RestHighLevelClient esClient)
throws IOException {
return scroll(searchRequest, clazz, objectMapper, esClient, null, null);
}
public static List scroll(
final SearchRequest searchRequest,
final Class clazz,
final ObjectMapper objectMapper,
final RestHighLevelClient esClient,
final Consumer searchHitsProcessor,
final Consumer aggsProcessor)
throws IOException {
return scroll(
searchRequest, clazz, objectMapper, esClient, null, searchHitsProcessor, aggsProcessor);
}
public static List scroll(
final SearchRequest searchRequest,
final Class clazz,
final ObjectMapper objectMapper,
final RestHighLevelClient esClient,
final Function searchHitMapper,
final Consumer searchHitsProcessor,
final Consumer aggsProcessor)
throws IOException {
searchRequest.scroll(TimeValue.timeValueMillis(SCROLL_KEEP_ALIVE_MS));
SearchResponse response = esClient.search(searchRequest, RequestOptions.DEFAULT);
// call aggregations processor
if (aggsProcessor != null) {
aggsProcessor.accept(response.getAggregations());
}
final List result = new ArrayList<>();
String scrollId = response.getScrollId();
SearchHits hits = response.getHits();
while (hits.getHits().length != 0) {
if (searchHitMapper != null) {
result.addAll(mapSearchHits(hits.getHits(), searchHitMapper));
} else {
result.addAll(mapSearchHits(hits.getHits(), objectMapper, clazz));
}
// call response processor
if (searchHitsProcessor != null) {
searchHitsProcessor.accept(response.getHits());
}
final SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
scrollRequest.scroll(TimeValue.timeValueMillis(SCROLL_KEEP_ALIVE_MS));
response = esClient.scroll(scrollRequest, RequestOptions.DEFAULT);
scrollId = response.getScrollId();
hits = response.getHits();
}
clearScroll(scrollId, esClient);
return result;
}
public static void scroll(
final SearchRequest searchRequest,
final Consumer searchHitsProcessor,
final RestHighLevelClient esClient)
throws IOException {
scroll(searchRequest, searchHitsProcessor, esClient, SCROLL_KEEP_ALIVE_MS);
}
public static void scroll(
final SearchRequest searchRequest,
final Consumer searchHitsProcessor,
final RestHighLevelClient esClient,
final long scrollKeepAlive)
throws IOException {
final var scrollKeepAliveTimeValue = TimeValue.timeValueMillis(scrollKeepAlive);
searchRequest.scroll(scrollKeepAliveTimeValue);
SearchResponse response = esClient.search(searchRequest, RequestOptions.DEFAULT);
String scrollId = response.getScrollId();
SearchHits hits = response.getHits();
while (hits.getHits().length != 0) {
// call response processor
if (searchHitsProcessor != null) {
searchHitsProcessor.accept(response.getHits());
}
final SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
scrollRequest.scroll(scrollKeepAliveTimeValue);
response = esClient.scroll(scrollRequest, RequestOptions.DEFAULT);
scrollId = response.getScrollId();
hits = response.getHits();
}
clearScroll(scrollId, esClient);
}
public static void scrollWith(
final SearchRequest searchRequest,
final RestHighLevelClient esClient,
final Consumer searchHitsProcessor)
throws IOException {
scrollWith(searchRequest, esClient, searchHitsProcessor, null, null);
}
public static void scrollWith(
final SearchRequest searchRequest,
final RestHighLevelClient esClient,
final Consumer searchHitsProcessor,
final Consumer aggsProcessor,
final Consumer firstResponseConsumer)
throws IOException {
searchRequest.scroll(TimeValue.timeValueMillis(SCROLL_KEEP_ALIVE_MS));
SearchResponse response = esClient.search(searchRequest, RequestOptions.DEFAULT);
if (firstResponseConsumer != null) {
firstResponseConsumer.accept(response.getHits());
}
// call aggregations processor
if (aggsProcessor != null) {
aggsProcessor.accept(response.getAggregations());
}
String scrollId = response.getScrollId();
SearchHits hits = response.getHits();
while (hits.getHits().length != 0) {
// call response processor
if (searchHitsProcessor != null) {
searchHitsProcessor.accept(response.getHits());
}
final SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
scrollRequest.scroll(TimeValue.timeValueMillis(SCROLL_KEEP_ALIVE_MS));
response = esClient.scroll(scrollRequest, RequestOptions.DEFAULT);
scrollId = response.getScrollId();
hits = response.getHits();
}
clearScroll(scrollId, esClient);
}
public static void clearScroll(final String scrollId, final RestHighLevelClient esClient) {
if (scrollId != null) {
// clear the scroll
final ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
clearScrollRequest.addScrollId(scrollId);
try {
esClient.clearScroll(clearScrollRequest, RequestOptions.DEFAULT);
} catch (final Exception e) {
LOGGER.warn("Error occurred when clearing the scroll with id [{}]", scrollId);
}
}
}
public static List scrollKeysToList(
final SearchRequest request, final RestHighLevelClient esClient) throws IOException {
final List result = new ArrayList<>();
final Consumer collectIds =
(hits) -> {
result.addAll(map(hits.getHits(), SEARCH_HIT_ID_TO_LONG));
};
scrollWith(request, esClient, collectIds, null, collectIds);
return result;
}
public static List scrollFieldToList(
final SearchRequest request, final String fieldName, final RestHighLevelClient esClient)
throws IOException {
final List result = new ArrayList<>();
final Function searchHitFieldToString =
(searchHit) -> (T) searchHit.getSourceAsMap().get(fieldName);
final Consumer collectFields =
(hits) -> {
result.addAll(map(hits.getHits(), searchHitFieldToString));
};
scrollWith(request, esClient, collectFields, null, collectFields);
return result;
}
public static Set scrollIdsToSet(
final SearchRequest request, final RestHighLevelClient esClient) throws IOException {
final Set result = new HashSet<>();
final Consumer collectIds =
(hits) -> {
result.addAll(map(hits.getHits(), SEARCH_HIT_ID_TO_STRING));
};
scrollWith(request, esClient, collectIds, null, collectIds);
return result;
}
public static Map getIndexNames(
final String aliasName, final Collection ids, final RestHighLevelClient esClient) {
final Map indexNames = new HashMap<>();
final SearchRequest piRequest =
new SearchRequest(aliasName)
.source(
new SearchSourceBuilder()
.query(idsQuery().addIds(ids.toArray(String[]::new)))
.fetchSource(false));
try {
scrollWith(
piRequest,
esClient,
sh -> {
indexNames.putAll(
Arrays.stream(sh.getHits())
.collect(
Collectors.toMap(
hit -> {
return hit.getId();
},
hit -> {
return hit.getIndex();
})));
});
} catch (final IOException e) {
throw new OperateRuntimeException(e.getMessage(), e);
}
return indexNames;
}
public static Map getIndexNames(
final AbstractTemplateDescriptor template,
final Collection ids,
final RestHighLevelClient esClient) {
final Map indexNames = new HashMap<>();
final SearchRequest piRequest =
ElasticsearchUtil.createSearchRequest(template)
.source(
new SearchSourceBuilder()
.query(idsQuery().addIds(ids.toArray(String[]::new)))
.fetchSource(false));
try {
scrollWith(
piRequest,
esClient,
sh -> {
indexNames.putAll(
Arrays.stream(sh.getHits())
.collect(
Collectors.toMap(
hit -> {
return hit.getId();
},
hit -> {
return hit.getIndex();
})));
});
} catch (final IOException e) {
throw new OperateRuntimeException(e.getMessage(), e);
}
return indexNames;
}
public static Map> getIndexNamesAsList(
final AbstractTemplateDescriptor template,
final Collection ids,
final RestHighLevelClient esClient) {
final Map> indexNames = new ConcurrentHashMap<>();
final SearchRequest piRequest =
ElasticsearchUtil.createSearchRequest(template)
.source(
new SearchSourceBuilder()
.query(idsQuery().addIds(ids.toArray(String[]::new)))
.fetchSource(false));
try {
scrollWith(
piRequest,
esClient,
sh -> {
Arrays.stream(sh.getHits())
.collect(
Collectors.groupingBy(
SearchHit::getId,
Collectors.mapping(SearchHit::getIndex, Collectors.toList())))
.forEach(
(key, value) ->
indexNames.merge(
key,
value,
(v1, v2) -> {
v1.addAll(v2);
return v1;
}));
});
} catch (final IOException e) {
throw new OperateRuntimeException(e.getMessage(), e);
}
return indexNames;
}
public static RequestOptions requestOptionsFor(final int maxSizeInBytes) {
final RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
options.setHttpAsyncResponseConsumerFactory(
new HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory(maxSizeInBytes));
return options.build();
}
public static SortOrder reverseOrder(final SortOrder sortOrder) {
if (sortOrder.equals(SortOrder.ASC)) {
return SortOrder.DESC;
} else {
return SortOrder.ASC;
}
}
private static final class DelegatingActionListener
implements ActionListener {
private final CompletableFuture future;
private final Executor executorDelegate;
private DelegatingActionListener(
final CompletableFuture future, final Executor executor) {
this.future = future;
executorDelegate = executor;
}
@Override
public void onResponse(final Response response) {
executorDelegate.execute(() -> future.complete(response));
}
@Override
public void onFailure(final Exception e) {
executorDelegate.execute(() -> future.completeExceptionally(e));
}
}
public enum QueryType {
ONLY_RUNTIME,
ALL
}
}