
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under
* one or more contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright ownership.
* Licensed under the Camunda License 1.0. You may not use this file
* except in compliance with the Camunda License 1.0.
*/
package io.camunda.tasklist.util;
import static io.camunda.tasklist.util.CollectionUtil.map;
import static io.camunda.tasklist.util.CollectionUtil.throwAwayNullElements;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.idsQuery;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.camunda.tasklist.entities.TasklistEntity;
import io.camunda.tasklist.exceptions.NotFoundException;
import io.camunda.tasklist.exceptions.PersistenceException;
import io.camunda.tasklist.exceptions.TasklistRuntimeException;
import io.camunda.tasklist.schema.indices.IndexDescriptor;
import io.camunda.tasklist.schema.templates.TemplateDescriptor;
import io.camunda.tasklist.tenant.TenantAwareElasticsearchClient;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class ElasticsearchUtil {
public static final String ZEEBE_INDEX_DELIMITER = "_";
public static final int SCROLL_KEEP_ALIVE_MS = 60000;
public static final int INTERNAL_SCROLL_KEEP_ALIVE_MS =
30000; // this scroll timeout value is used for reindex and delete queries
public static final int QUERY_MAX_SIZE = 10000;
public static final int UPDATE_RETRY_COUNT = 3;
public static final Function<SearchHit, Long> SEARCH_HIT_ID_TO_LONG =
(hit) -> Long.valueOf(hit.getId());
public static final Function<SearchHit, String> SEARCH_HIT_ID_TO_STRING = SearchHit::getId;
private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchUtil.class);
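// Illustrative sketch (not part of the original source): INTERNAL_SCROLL_KEEP_ALIVE_MS is the
// scroll keep-alive intended for reindex and delete-by-query requests; the descriptor, query and
// variables below are hypothetical.
//
//   final DeleteByQueryRequest deleteRequest =
//       new DeleteByQueryRequest(descriptor.getFullQualifiedName())
//           .setQuery(QueryBuilders.termQuery("processInstanceId", processInstanceId))
//           .setScroll(TimeValue.timeValueMillis(INTERNAL_SCROLL_KEEP_ALIVE_MS));
//   deleteByQueryAsync(deleteRequest, executor, esClient);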
public static SearchRequest createSearchRequest(TemplateDescriptor template) {
return createSearchRequest(template, QueryType.ALL);
}
public static SearchHit getRawResponseWithTenantCheck(
final String id,
IndexDescriptor descriptor,
QueryType queryType,
TenantAwareElasticsearchClient tenantAwareClient)
throws IOException {
final QueryBuilder query = idsQuery().addIds(id);
final SearchRequest request =
ElasticsearchUtil.createSearchRequest(descriptor, queryType)
.source(new SearchSourceBuilder().query(constantScoreQuery(query)));
final SearchResponse response = tenantAwareClient.search(request);
if (response.getHits().getTotalHits().value == 1) {
return response.getHits().getHits()[0];
} else if (response.getHits().getTotalHits().value > 1) {
throw new NotFoundException(
String.format("Unique %s with id %s was not found", descriptor.getIndexName(), id));
} else {
throw new NotFoundException(
String.format("%s with id %s was not found", descriptor.getIndexName(), id));
}
}
public static CompletableFuture<BulkByScrollResponse> reindexAsync(
final ReindexRequest reindexRequest,
final Executor executor,
final RestHighLevelClient esClient) {
final var reindexFuture = new CompletableFuture<BulkByScrollResponse>();
esClient.reindexAsync(
reindexRequest,
RequestOptions.DEFAULT,
new DelegatingActionListener<>(reindexFuture, executor));
return reindexFuture;
}
public static CompletableFuture<BulkByScrollResponse> deleteByQueryAsync(
final DeleteByQueryRequest deleteRequest,
final Executor executor,
final RestHighLevelClient esClient) {
final var deleteFuture = new CompletableFuture<BulkByScrollResponse>();
esClient.deleteByQueryAsync(
deleteRequest,
RequestOptions.DEFAULT,
new DelegatingActionListener<>(deleteFuture, executor));
return deleteFuture;
}
public static CompletableFuture<SearchResponse> searchAsync(
final SearchRequest searchRequest,
final Executor executor,
final RestHighLevelClient esClient) {
final var searchFuture = new CompletableFuture<SearchResponse>();
esClient.searchAsync(
searchRequest,
RequestOptions.DEFAULT,
new DelegatingActionListener<>(searchFuture, executor));
return searchFuture;
}
public static CompletableFuture<SearchResponse> scrollAsync(
final SearchScrollRequest scrollRequest,
final Executor executor,
final RestHighLevelClient esClient) {
final var searchFuture = new CompletableFuture<SearchResponse>();
esClient.scrollAsync(
scrollRequest,
RequestOptions.DEFAULT,
new DelegatingActionListener<>(searchFuture, executor));
return searchFuture;
}
/* CREATE QUERIES */
public static SearchRequest createSearchRequest(IndexDescriptor descriptor, QueryType queryType) {
return new SearchRequest(whereToSearch(descriptor, queryType));
}
public static String whereToSearch(IndexDescriptor descriptor, QueryType queryType) {
switch (queryType) {
case ONLY_RUNTIME:
return descriptor.getFullQualifiedName();
case ALL:
default:
return descriptor.getAlias();
}
}
public static QueryBuilder joinWithOr(
BoolQueryBuilder boolQueryBuilder, QueryBuilder... queries) {
final List<QueryBuilder> notNullQueries = throwAwayNullElements(queries);
for (QueryBuilder query : notNullQueries) {
boolQueryBuilder.should(query);
}
return boolQueryBuilder;
}
/**
* Join queries with OR clause. If 0 queries are passed for wrapping, then null is returned. If 1
* parameter is passed, it is returned back as is. Otherwise, a new BoolQuery is created and
* returned.
*/
public static QueryBuilder joinWithOr(QueryBuilder... queries) {
final List<QueryBuilder> notNullQueries = throwAwayNullElements(queries);
switch (notNullQueries.size()) {
case 0:
return null;
case 1:
return notNullQueries.get(0);
default:
final BoolQueryBuilder boolQ = boolQuery();
for (QueryBuilder query : notNullQueries) {
boolQ.should(query);
}
return boolQ;
}
}
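// Usage sketch (illustrative, not part of the original source): joinWithOr silently drops null
// entries, so optional filters can be passed as-is; the field names and variables below are
// hypothetical.
//
//   final QueryBuilder byAssignee =
//       assignee == null ? null : QueryBuilders.termQuery("assignee", assignee);
//   final QueryBuilder byCandidateGroup =
//       group == null ? null : QueryBuilders.termQuery("candidateGroups", group);
//   final QueryBuilder query = joinWithOr(byAssignee, byCandidateGroup); // null if both are null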
public static QueryBuilder joinWithOr(Collection<QueryBuilder> queries) {
return joinWithOr(queries.toArray(new QueryBuilder[queries.size()]));
}
/**
* Join queries with AND clause. If 0 queries are passed for wrapping, then null is returned. If 1
* parameter is passed, it is returned back as is. Otherwise, a new BoolQuery is created and
* returned.
*/
public static QueryBuilder joinWithAnd(QueryBuilder... queries) {
final List<QueryBuilder> notNullQueries = throwAwayNullElements(queries);
switch (notNullQueries.size()) {
case 0:
return null;
case 1:
return notNullQueries.get(0);
default:
final BoolQueryBuilder boolQ = boolQuery();
for (QueryBuilder query : notNullQueries) {
boolQ.must(query);
}
return boolQ;
}
}
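// Usage sketch (illustrative, not part of the original source): joinWithAnd combines independent
// filters into a single bool query; the field names and variables below are hypothetical.
//
//   final QueryBuilder byProcess = QueryBuilders.termQuery("processDefinitionId", processId);
//   final QueryBuilder byState = QueryBuilders.termQuery("state", "CREATED");
//   final SearchSourceBuilder source =
//       new SearchSourceBuilder().query(constantScoreQuery(joinWithAnd(byProcess, byState)));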
public static QueryBuilder addToBoolMust(BoolQueryBuilder boolQuery, QueryBuilder... queries) {
if (boolQuery.mustNot().size() != 0
|| boolQuery.filter().size() != 0
|| boolQuery.should().size() != 0) {
throw new IllegalArgumentException("BoolQuery with only must elements is expected here.");
}
final List<QueryBuilder> notNullQueries = throwAwayNullElements(queries);
for (QueryBuilder query : notNullQueries) {
boolQuery.must(query);
}
return boolQuery;
}
public static BoolQueryBuilder createMatchNoneQuery() {
return boolQuery().must(QueryBuilders.wrapperQuery("{\"match_none\": {}}"));
}
public static void processBulkRequest(RestHighLevelClient esClient, BulkRequest bulkRequest)
throws PersistenceException {
processBulkRequest(esClient, bulkRequest, RefreshPolicy.NONE);
}
/* EXECUTE QUERY */
public static void processBulkRequest(
RestHighLevelClient esClient, BulkRequest bulkRequest, RefreshPolicy refreshPolicy)
throws PersistenceException {
if (bulkRequest.requests().size() > 0) {
try {
LOGGER.debug("************* FLUSH BULK START *************");
bulkRequest = bulkRequest.setRefreshPolicy(refreshPolicy);
final BulkResponse bulkItemResponses = esClient.bulk(bulkRequest, RequestOptions.DEFAULT);
final BulkItemResponse[] items = bulkItemResponses.getItems();
for (BulkItemResponse responseItem : items) {
if (responseItem.isFailed()) {
LOGGER.error(
String.format(
"%s failed for type [%s] and id [%s]: %s",
responseItem.getOpType(),
responseItem.getIndex(),
responseItem.getId(),
responseItem.getFailureMessage()),
responseItem.getFailure().getCause());
throw new PersistenceException(
"Operation failed: " + responseItem.getFailureMessage(),
responseItem.getFailure().getCause(),
responseItem.getItemId());
}
}
LOGGER.debug("************* FLUSH BULK FINISH *************");
} catch (IOException ex) {
throw new PersistenceException(
"Error when processing bulk request against Elasticsearch: " + ex.getMessage(), ex);
}
}
}
public static void executeUpdate(RestHighLevelClient esClient, UpdateRequest updateRequest)
throws PersistenceException {
try {
esClient.update(updateRequest, RequestOptions.DEFAULT);
} catch (ElasticsearchException | IOException e) {
final String errorMessage =
String.format(
"Update request failed for [%s] and id [%s] with the message [%s].",
updateRequest.index(), updateRequest.id(), e.getMessage());
throw new PersistenceException(errorMessage, e);
}
}
public static <T> List<T> mapSearchHits(
List<SearchHit> searchHits, ObjectMapper objectMapper, JavaType valueType) {
return mapSearchHits(
searchHits.toArray(new SearchHit[searchHits.size()]), objectMapper, valueType);
}
/* MAP QUERY RESULTS */
public static <T> List<T> mapSearchHits(
SearchHit[] searchHits, Function<SearchHit, T> searchHitMapper) {
return map(searchHits, searchHitMapper);
}
public static <T> List<T> mapSearchHits(
SearchHit[] searchHits, ObjectMapper objectMapper, Class<T> clazz) {
return map(
searchHits,
(searchHit) -> fromSearchHit(searchHit.getSourceAsString(), objectMapper, clazz));
}
public static <T> T fromSearchHit(
String searchHitString, ObjectMapper objectMapper, Class<T> clazz) {
final T entity;
try {
entity = objectMapper.readValue(searchHitString, clazz);
} catch (IOException e) {
throw new TasklistRuntimeException(
String.format(
"Error while reading entity of type %s from Elasticsearch!", clazz.getName()),
e);
}
return entity;
}
public static <T> List<T> mapSearchHits(
SearchHit[] searchHits, ObjectMapper objectMapper, JavaType valueType) {
return map(
searchHits,
(searchHit) -> fromSearchHit(searchHit.getSourceAsString(), objectMapper, valueType));
}
public static <T> T fromSearchHit(
String searchHitString, ObjectMapper objectMapper, JavaType valueType) {
final T entity;
try {
entity = objectMapper.readValue(searchHitString, valueType);
} catch (IOException e) {
throw new TasklistRuntimeException(
String.format(
"Error while reading entity of type %s from Elasticsearch!", valueType.toString()),
e);
}
return entity;
}
public static <T extends TasklistEntity> List<T> scroll(
SearchRequest searchRequest,
Class<T> clazz,
ObjectMapper objectMapper,
RestHighLevelClient esClient)
throws IOException {
return scroll(searchRequest, clazz, objectMapper, esClient, null, null);
}
public static <T extends TasklistEntity> List<T> scroll(
SearchRequest searchRequest,
Class<T> clazz,
ObjectMapper objectMapper,
RestHighLevelClient esClient,
Consumer<SearchHits> searchHitsProcessor,
Consumer<Aggregations> aggsProcessor)
throws IOException {
searchRequest.scroll(TimeValue.timeValueMillis(SCROLL_KEEP_ALIVE_MS));
SearchResponse response = esClient.search(searchRequest, RequestOptions.DEFAULT);
// call aggregations processor
if (aggsProcessor != null) {
aggsProcessor.accept(response.getAggregations());
}
final List<T> result = new ArrayList<>();
String scrollId = response.getScrollId();
SearchHits hits = response.getHits();
while (hits.getHits().length != 0) {
result.addAll(mapSearchHits(hits.getHits(), objectMapper, clazz));
// call response processor
if (searchHitsProcessor != null) {
searchHitsProcessor.accept(response.getHits());
}
final SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
scrollRequest.scroll(TimeValue.timeValueMillis(SCROLL_KEEP_ALIVE_MS));
response = esClient.scroll(scrollRequest, RequestOptions.DEFAULT);
scrollId = response.getScrollId();
hits = response.getHits();
}
clearScroll(scrollId, esClient);
return result;
}
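// Usage sketch (illustrative, not part of the original source): scroll pages through every match
// and maps the hits to the given entity class; taskTemplate and TaskEntity are hypothetical
// placeholders for a concrete TemplateDescriptor and TasklistEntity subclass.
//
//   final SearchRequest request =
//       createSearchRequest(taskTemplate, QueryType.ALL)
//           .source(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()));
//   final List<TaskEntity> tasks = scroll(request, TaskEntity.class, objectMapper, esClient);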
public static void scrollWith(
SearchRequest searchRequest,
RestHighLevelClient esClient,
Consumer<SearchHits> searchHitsProcessor,
Consumer<Aggregations> aggsProcessor,
Consumer<SearchHits> firstResponseConsumer)
throws IOException {
searchRequest.scroll(TimeValue.timeValueMillis(SCROLL_KEEP_ALIVE_MS));
SearchResponse response = esClient.search(searchRequest, RequestOptions.DEFAULT);
if (firstResponseConsumer != null) {
firstResponseConsumer.accept(response.getHits());
}
// call aggregations processor
if (aggsProcessor != null) {
aggsProcessor.accept(response.getAggregations());
}
String scrollId = response.getScrollId();
SearchHits hits = response.getHits();
while (hits.getHits().length != 0) {
// call response processor
if (searchHitsProcessor != null) {
searchHitsProcessor.accept(response.getHits());
}
final SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
scrollRequest.scroll(TimeValue.timeValueMillis(SCROLL_KEEP_ALIVE_MS));
response = esClient.scroll(scrollRequest, RequestOptions.DEFAULT);
scrollId = response.getScrollId();
hits = response.getHits();
}
clearScroll(scrollId, esClient);
}
public static void clearScroll(String scrollId, RestHighLevelClient esClient) {
if (scrollId != null) {
// clear the scroll
final ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
clearScrollRequest.addScrollId(scrollId);
try {
esClient.clearScroll(clearScrollRequest, RequestOptions.DEFAULT);
} catch (Exception e) {
LOGGER.warn("Error occurred when clearing the scroll with id [{}]", scrollId);
}
}
}
public static List<String> scrollIdsToList(SearchRequest request, RestHighLevelClient esClient)
throws IOException {
final List<String> result = new ArrayList<>();
final Consumer<SearchHits> collectIds =
(hits) -> result.addAll(map(hits.getHits(), SEARCH_HIT_ID_TO_STRING));
scrollWith(request, esClient, collectIds, null, null);
return result;
}
public static Map<String, String> scrollIdsWithIndexToMap(
SearchRequest request, RestHighLevelClient esClient) throws IOException {
final Map<String, String> result = new LinkedHashMap<>();
final Consumer<SearchHits> collectIds =
(hits) ->
result.putAll(
Stream.of(hits.getHits())
.collect(Collectors.toMap(SearchHit::getId, SearchHit::getIndex)));
scrollWith(request, esClient, collectIds, null, null);
return result;
}
public static List<Long> scrollKeysToList(SearchRequest request, RestHighLevelClient esClient)
throws IOException {
final List<Long> result = new ArrayList<>();
final Consumer<SearchHits> collectIds =
(hits) -> result.addAll(map(hits.getHits(), SEARCH_HIT_ID_TO_LONG));
scrollWith(request, esClient, collectIds, null, null);
return result;
}
public static <T> List<T> scrollFieldToList(
SearchRequest request, String fieldName, RestHighLevelClient esClient) throws IOException {
final List<T> result = new ArrayList<>();
final Function<SearchHit, T> searchHitFieldToString =
(searchHit) -> (T) searchHit.getSourceAsMap().get(fieldName);
final Consumer<SearchHits> collectFields =
(hits) -> result.addAll(map(hits.getHits(), searchHitFieldToString));
scrollWith(request, esClient, collectFields, null, null);
return result;
}
public static Set<String> scrollIdsToSet(SearchRequest request, RestHighLevelClient esClient)
throws IOException {
final Set<String> result = new HashSet<>();
final Consumer<SearchHits> collectIds =
(hits) -> result.addAll(map(hits.getHits(), SEARCH_HIT_ID_TO_STRING));
scrollWith(request, esClient, collectIds, null, collectIds);
return result;
}
public static Set<Long> scrollKeysToSet(SearchRequest request, RestHighLevelClient esClient)
throws IOException {
final Set<Long> result = new HashSet<>();
final Consumer<SearchHits> collectIds =
(hits) -> result.addAll(map(hits.getHits(), SEARCH_HIT_ID_TO_LONG));
scrollWith(request, esClient, collectIds, null, null);
return result;
}
public static void refreshIndicesFor(
final RestHighLevelClient esClient, final String indexPattern) {
final var refreshRequest = new RefreshRequest(indexPattern);
try {
final RefreshResponse refresh =
esClient.indices().refresh(refreshRequest, RequestOptions.DEFAULT);
if (refresh.getFailedShards() > 0) {
LOGGER.warn("Unable to refresh indices: {}", indexPattern);
}
} catch (Exception ex) {
LOGGER.warn(String.format("Unable to refresh indices: %s", indexPattern), ex);
}
}
public enum QueryType {
ONLY_RUNTIME,
ALL
}
private static final class DelegatingActionListener<Response>
implements ActionListener<Response> {
private final CompletableFuture<Response> future;
private final Executor executorDelegate;
private DelegatingActionListener(
final CompletableFuture<Response> future, final Executor executor) {
this.future = future;
this.executorDelegate = executor;
}
@Override
public void onResponse(Response response) {
executorDelegate.execute(() -> future.complete(response));
}
@Override
public void onFailure(Exception e) {
executorDelegate.execute(() -> future.completeExceptionally(e));
}
}
}