
org.molgenis.elasticsearch.ElasticSearchService Maven / Gradle / Ivy
package org.molgenis.elasticsearch;
import static org.elasticsearch.client.Requests.refreshRequest;
import static org.molgenis.elasticsearch.util.MapperTypeSanitizer.sanitizeMapperType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse;
import org.elasticsearch.action.deletebyquery.IndexDeleteByQueryResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.molgenis.data.Query;
import org.molgenis.data.Repository;
import org.molgenis.elasticsearch.index.IndexRequestGenerator;
import org.molgenis.elasticsearch.index.MappingsBuilder;
import org.molgenis.elasticsearch.request.SearchRequestGenerator;
import org.molgenis.elasticsearch.response.ResponseParser;
import org.molgenis.search.MultiSearchRequest;
import org.molgenis.search.SearchRequest;
import org.molgenis.search.SearchResult;
import org.molgenis.search.SearchService;
/**
 * ElasticSearch implementation of the {@link SearchService} interface.
 *
 * <p>All operations target a single index ({@code indexName}); document types within that index
 * correspond to {@link Repository} names, sanitized via {@code sanitizeMapperType}.
 *
 * @author erwin
 */
public class ElasticSearchService implements SearchService
{
	private static final Logger LOG = Logger.getLogger(ElasticSearchService.class);

	private final String indexName;
	private final Client client;
	private final ResponseParser responseParser = new ResponseParser();
	private final SearchRequestGenerator generator = new SearchRequestGenerator();

	/**
	 * Creates the service and ensures the backing index exists (blocks until the cluster reports
	 * at least yellow status).
	 *
	 * @param client Elasticsearch client, not null
	 * @param indexName name of the index all operations run against, not null
	 * @throws IllegalArgumentException if client or indexName is null
	 */
	public ElasticSearchService(Client client, String indexName)
	{
		if (client == null)
		{
			throw new IllegalArgumentException("Client is null");
		}
		if (indexName == null)
		{
			throw new IllegalArgumentException("IndexName is null");
		}
		this.indexName = indexName;
		this.client = client;
		createIndexIfNotExists();
	}

	@Override
	public SearchResult search(SearchRequest request)
	{
		return search(SearchType.QUERY_AND_FETCH, request);
	}

	@Override
	public SearchResult multiSearch(MultiSearchRequest request)
	{
		return multiSearch(SearchType.QUERY_AND_FETCH, request);
	}

	@Override
	public long count(String documentType, Query q)
	{
		// SearchType.COUNT skips document fetching; only the total hit count is returned
		SearchRequest request = new SearchRequest(documentType, q, Collections.<String> emptyList());
		SearchResult result = search(SearchType.COUNT, request);
		return result.getTotalHitCount();
	}

	/**
	 * Executes a search across the (optional) set of document types in the request.
	 *
	 * @param searchType the Elasticsearch search type to use
	 * @param request the multi-type search request; a null document type list searches all types
	 * @return the parsed search result
	 */
	public SearchResult multiSearch(SearchType searchType, MultiSearchRequest request)
	{
		// null means "search all document types"; otherwise sanitize each type name
		List<String> documentTypes = null;
		if (request.getDocumentType() != null)
		{
			documentTypes = new ArrayList<String>();
			for (String documentType : request.getDocumentType())
			{
				documentTypes.add(sanitizeMapperType(documentType));
			}
		}

		SearchRequestBuilder builder = client.prepareSearch(indexName);
		generator.buildSearchRequest(builder, documentTypes, searchType, request.getQuery(),
				request.getFieldsToReturn(), null, null);
		return executeSearch(builder);
	}

	/** Executes a single-type search, forwarding aggregate fields from the request. */
	private SearchResult search(SearchType searchType, SearchRequest request)
	{
		SearchRequestBuilder builder = client.prepareSearch(indexName);
		String documentType = request.getDocumentType() == null ? null : sanitizeMapperType(request.getDocumentType());
		generator.buildSearchRequest(builder, documentType, searchType, request.getQuery(),
				request.getFieldsToReturn(), request.getAggregateField1(), request.getAggregateField2());
		return executeSearch(builder);
	}

	/** Runs a prepared search request (with debug logging) and parses the response. */
	private SearchResult executeSearch(SearchRequestBuilder builder)
	{
		if (LOG.isDebugEnabled())
		{
			LOG.debug("SearchRequestBuilder:" + builder);
		}
		SearchResponse response = builder.execute().actionGet();
		if (LOG.isDebugEnabled())
		{
			LOG.debug("SearchResponse:" + response);
		}
		return responseParser.parseSearchResponse(response);
	}

	@Override
	public void indexRepository(Repository repository)
	{
		if (!repository.iterator().hasNext())
		{
			return; // empty repository: nothing to index
		}
		createMappingsOrThrow(repository);

		// Full reindex: remove all existing documents of this type before inserting
		LOG.info("Going to update index [" + indexName + "] for repository type [" + repository.getName() + "]");
		deleteDocumentsByType(repository.getName());

		insertRepositoryDocuments(repository);
	}

	@Override
	public boolean documentTypeExists(String documentType)
	{
		String sanitizedDocumentType = sanitizeMapperType(documentType);

		return client.admin().indices().typesExists(new TypesExistsRequest(new String[]
		{ indexName }, sanitizedDocumentType)).actionGet().isExists();
	}

	@Override
	public void deleteDocumentsByType(String documentType)
	{
		LOG.info("Going to delete all documents of type [" + documentType + "]");

		String sanitizedDocumentType = sanitizeMapperType(documentType);
		DeleteByQueryResponse deleteResponse = client.prepareDeleteByQuery(indexName)
				.setQuery(new TermQueryBuilder("_type", sanitizedDocumentType)).execute().actionGet();

		if (deleteResponse != null)
		{
			IndexDeleteByQueryResponse idbqr = deleteResponse.getIndex(indexName);
			if ((idbqr != null) && (idbqr.getFailedShards() > 0))
			{
				throw new ElasticsearchException("Delete failed. Returned headers:" + idbqr.getHeaders());
			}
		}

		LOG.info("Delete done.");
	}

	@Override
	public void deleteDocumentByIds(String documentType, List<String> documentIds)
	{
		LOG.info("Going to delete document of type [" + documentType + "] with Id : " + documentIds);

		String sanitizedDocumentType = sanitizeMapperType(documentType);
		for (String documentId : documentIds)
		{
			// setRefresh(true) makes each deletion immediately visible to searches
			DeleteResponse deleteResponse = client.prepareDelete(indexName, sanitizedDocumentType, documentId)
					.setRefresh(true).execute().actionGet();
			if (deleteResponse != null)
			{
				if (!deleteResponse.isFound())
				{
					throw new ElasticsearchException("Delete failed. Returned headers:" + deleteResponse.getHeaders());
				}
			}
		}

		LOG.info("Delete done.");
	}

	@Override
	public void updateRepositoryIndex(Repository repository)
	{
		if (!repository.iterator().hasNext())
		{
			return; // empty repository: nothing to index
		}
		// Unlike indexRepository, existing documents are NOT deleted first
		createMappingsOrThrow(repository);
		insertRepositoryDocuments(repository);
	}

	@Override
	public void updateDocumentById(String documentType, String documentId, String updateScript)
	{
		LOG.info("Going to update document of type [" + documentType + "] with Id : " + documentId);

		String sanitizedDocumentType = sanitizeMapperType(documentType);
		// NOTE(review): updateScript is concatenated into a server-side script; callers must not
		// pass untrusted input here (script injection risk).
		UpdateResponse updateResponse = client.prepareUpdate(indexName, sanitizedDocumentType, documentId)
				.setScript("ctx._source." + updateScript).execute().actionGet();

		if (updateResponse == null)
		{
			throw new ElasticsearchException("update failed.");
		}

		LOG.info("Update done.");
	}

	/** Creates the mapping for the repository, wrapping any {@link IOException} in an {@link ElasticsearchException}. */
	private void createMappingsOrThrow(Repository repository)
	{
		try
		{
			LOG.info("Going to create mapping for repository [" + repository.getName() + "]");
			createMappings(repository);
		}
		catch (IOException e)
		{
			String msg = "Exception creating mapping for repository [" + repository.getName() + "]";
			LOG.error(msg, e);
			throw new ElasticsearchException(msg, e);
		}
	}

	/**
	 * Bulk-inserts all documents of the repository into the index.
	 *
	 * @throws ElasticsearchException if any bulk request reports failures
	 */
	private void insertRepositoryDocuments(Repository repository)
	{
		LOG.info("Going to insert documents of type [" + repository.getName() + "]");
		IndexRequestGenerator requestGenerator = new IndexRequestGenerator(client, indexName);
		Iterable<BulkRequestBuilder> requests = requestGenerator.buildIndexRequest(repository);
		for (BulkRequestBuilder request : requests)
		{
			LOG.info("Request created");
			if (LOG.isDebugEnabled())
			{
				LOG.debug("BulkRequest:" + request);
			}

			BulkResponse response = request.execute().actionGet();
			LOG.info("Request done");
			if (LOG.isDebugEnabled())
			{
				LOG.debug("BulkResponse:" + response);
			}

			if (response.hasFailures())
			{
				throw new ElasticsearchException(response.buildFailureMessage());
			}
		}
	}

	/** Creates the index if it does not exist yet, waiting for the cluster to be ready first. */
	private void createIndexIfNotExists()
	{
		// Wait until elasticsearch is ready
		client.admin().cluster().prepareHealth().setWaitForYellowStatus().execute().actionGet();

		boolean hasIndex = client.admin().indices().exists(new IndicesExistsRequest(indexName)).actionGet().isExists();
		if (!hasIndex)
		{
			CreateIndexResponse response = client.admin().indices().prepareCreate(indexName).execute().actionGet();
			if (!response.isAcknowledged())
			{
				throw new ElasticsearchException("Creation of index [" + indexName + "] failed. Response=" + response);
			}
			LOG.info("Index [" + indexName + "] created");
		}
	}

	/** Builds and puts the type mapping for the repository; the type name is sanitized. */
	private void createMappings(Repository repository) throws IOException
	{
		XContentBuilder jsonBuilder = MappingsBuilder.buildMapping(repository);
		LOG.info("Going to create mapping [" + jsonBuilder.string() + "]");

		PutMappingResponse response = client.admin().indices().preparePutMapping(indexName)
				.setType(sanitizeMapperType(repository.getName())).setSource(jsonBuilder).execute().actionGet();

		if (!response.isAcknowledged())
		{
			throw new ElasticsearchException("Creation of mapping for documentType [" + repository.getName()
					+ "] failed. Response=" + response);
		}

		LOG.info("Mapping for documentType [" + repository.getName() + "] created");
	}

	/** Forces a refresh of the index so recent writes become visible to searches. */
	@Override
	public void refresh()
	{
		client.admin().indices().refresh(refreshRequest()).actionGet();
	}
}
© 2015 - 2025 Weber Informatics LLC | Privacy Policy