Many resources are needed to host this project for download. Please understand that we have to cover our server costs. Thank you in advance. The project price is only $1.
You can buy this project and then download or modify it as often as you want.
package org.codelibs.elasticsearch.solr.rest;
import java.io.ByteArrayInputStream;
import java.io.StringReader;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest.ACTION;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.codelibs.elasticsearch.solr.SolrPluginConstants;
import org.codelibs.elasticsearch.solr.solr.JavaBinUpdateRequestCodec;
import org.codelibs.elasticsearch.solr.solr.SolrResponseUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.flush.FlushResponse;
import org.elasticsearch.action.admin.indices.optimize.OptimizeRequest;
import org.elasticsearch.action.admin.indices.optimize.OptimizeResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.deletebyquery.DeleteByQueryRequest;
import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.replication.ReplicationType;
import org.elasticsearch.action.support.replication.ShardReplicationOperationRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
public class SolrUpdateRestAction extends BaseRestHandler {
// Literal used when testing boolean-ish request parameters ("commit", etc.).
private static final String TRUE = "true";

// Fields in the Solr input document that are scanned (in this order) for a
// value to use as the document id.
private static final String[] DEFAULT_ID_FIELDS = { "id", "docid",
"documentid", "contentid", "uuid", "url" };

// StAX factory used to parse XML update request bodies.
// NOTE(review): DTD/external-entity support is not disabled here, and this
// factory parses content supplied by clients — confirm XXE hardening
// (XMLInputFactory.SUPPORT_DTD=false, IS_SUPPORTING_EXTERNAL_ENTITIES=false).
private final XMLInputFactory inputFactory = XMLInputFactory.newInstance();

// When true, the id taken from the Solr input document is hashed to build
// the Elasticsearch document id; when false (the default per the
// constructor's settings.getAsBoolean call) the id is used as provided.
// Configured via 'solr.hashIds' in elasticsearch.yml.
private final boolean hashIds;
// Whether a Solr "commit" is handled as an Elasticsearch flush
// ('solr.commitAsFlush', default true — see constructor).
private final boolean commitAsFlush;
// Whether a Solr "optimize" is handled as an Elasticsearch optimize
// ('solr.optimizeAsOptimize', default true — see constructor).
private final boolean optimizeAsOptimize;
// Index and type used when the request URL does not specify {index}/{type}.
private final String defaultIndexName;
private final String defaultTypeName;
// Id fields actually scanned; overridable via 'solr.idFields'.
private final String[] idFields;
// Query-parsing options read from 'solr.lowercaseExpandedTerms' and
// 'solr.autoGeneratePhraseQueries'; assigned in the constructor but not
// read anywhere in the code visible here.
private Boolean lowercaseExpandedTerms;
private Boolean autoGeneratePhraseQueries;
/**
* Rest actions that mock Solr update handlers
*
* @param settings
* ES settings
* @param client
* ES client
* @param restController
* ES rest controller
*/
@Inject
public SolrUpdateRestAction(final Settings settings, final Client client,
final RestController restController) {
super(settings, restController, client);
hashIds = settings.getAsBoolean("solr.hashIds", false);
commitAsFlush = settings.getAsBoolean("solr.commitAsFlush", true);
optimizeAsOptimize = settings.getAsBoolean("solr.optimizeAsOptimize",
true);
logger.info("Solr input document id's will " + (hashIds ? "" : "not ")
+ "be hashed to created Elasticsearch document id's");
defaultIndexName = settings.get("solr.default.index",
SolrPluginConstants.DEFAULT_INDEX_NAME);
defaultTypeName = settings.get("solr.default.type",
SolrPluginConstants.DEFAULT_TYPE_NAME);
idFields = settings.getAsArray("solr.idFields", DEFAULT_ID_FIELDS);
lowercaseExpandedTerms = settings.getAsBoolean(
"solr.lowercaseExpandedTerms", false);
autoGeneratePhraseQueries = settings.getAsBoolean(
"solr.autoGeneratePhraseQueries", true);
// register update handlers
// specifying and index and type is optional
restController.registerHandler(RestRequest.Method.GET, "/_solr/update",
this);
restController.registerHandler(RestRequest.Method.GET,
"/_solr/update/{handler}", this);
restController.registerHandler(RestRequest.Method.GET,
"/{index}/_solr/update", this);
restController.registerHandler(RestRequest.Method.GET,
"/{index}/{type}/_solr/update", this);
restController.registerHandler(RestRequest.Method.POST,
"/_solr/update", this);
restController.registerHandler(RestRequest.Method.POST,
"/_solr/update/{handler}", this);
restController.registerHandler(RestRequest.Method.POST,
"/{index}/_solr/update", this);
restController.registerHandler(RestRequest.Method.POST,
"/{index}/{type}/_solr/update", this);
}
@Override
protected void handleRequest(final RestRequest request,
final RestChannel channel, final Client client) {
final long startTime = System.currentTimeMillis();
final RestRequest requestEx = new ExtendedRestRequest(request);
boolean isCommit = false;
boolean isOptimize = false;
// get the type of Solr update handler we want to mock, default to xml
final String contentType = request.header("Content-Type");
String requestType = null;
if (contentType != null) {
if (contentType.indexOf("application/javabin") >= 0) {
requestType = SolrPluginConstants.JAVABIN_FORMAT_TYPE;
} else if (contentType.indexOf("application/x-www-form-urlencoded") >= 0) {
isCommit = requestEx.paramAsBoolean("commit", false);
isOptimize = requestEx.paramAsBoolean("optimize", false);
requestType = SolrPluginConstants.NONE_FORMAT_TYPE;
}
}
if (requestType == null) {
requestType = SolrPluginConstants.XML_FORMAT_TYPE;
}
// Requests are typically sent to Solr in batches of documents
// We can copy that by submitting batch requests to Solr
final BulkRequest bulkRequest = Requests.bulkRequest();
final List deleteQueryList = new ArrayList();
// parse and handle the content
final BytesReference content = requestEx.content();
if (content.length() == 0) {
if (TRUE.equalsIgnoreCase(requestEx.param("commit"))
|| TRUE.equalsIgnoreCase(requestEx.param("softCommit"))
|| TRUE.equalsIgnoreCase(requestEx.param("prepareCommit"))
|| StringUtils.isNotBlank(requestEx.param("commitWithin"))) {
isCommit = true;
} else if (TRUE.equalsIgnoreCase(requestEx.param("optimize"))) {
isOptimize = true;
} else if (TRUE.equalsIgnoreCase(requestEx.param("rollback"))) {
isCommit = true; // rollback is not supported
}
} else if (SolrPluginConstants.XML_FORMAT_TYPE.equals(requestType)) {
// XML Content
XMLStreamReader parser = null;
try {
// create parser for the content
parser = inputFactory.createXMLStreamReader(new StringReader(
content.toUtf8()));
// parse the xml
// we only care about doc and delete tags for now
boolean stop = false;
while (!stop) {
// get the xml "event"
final int event = parser.next();
switch (event) {
case XMLStreamConstants.END_DOCUMENT:
// this is the end of the document
// close parser and exit while loop
stop = true;
break;
case XMLStreamConstants.START_ELEMENT:
// start of an xml tag
// determine if we need to add or delete a document
final String currTag = parser.getLocalName();
if ("doc".equals(currTag)) {
// add a document
final Map doc = parseXmlDoc(parser);
if (doc != null) {
bulkRequest
.add(getIndexRequest(doc, requestEx));
}
} else if ("delete".equals(currTag)) {
// delete a document
final List> requestList = parseXmlDelete(
parser, requestEx);
for (final ActionRequest> req : requestList) {
if (req instanceof DeleteRequest) {
bulkRequest.add(req);
} else if (req instanceof DeleteByQueryRequest) {
deleteQueryList
.add((DeleteByQueryRequest) req);
}
}
} else if ("commit".equals(currTag)) {
isCommit = true;
} else if ("optimize".equals(currTag)) {
isOptimize = true;
}
// rollback is not supported at the moment..
break;
default:
break;
}
}
} catch (final Exception e) {
// some sort of error processing the xml input
logger.error("Error processing xml input", e);
final NamedList