/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.URLEncoder;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import com.maxmind.geoip2.DatabaseReader;
import com.maxmind.geoip2.exception.GeoIp2Exception;
import com.maxmind.geoip2.model.CityResponse;
import com.opencsv.CSVReader;
import com.opencsv.CSVWriter;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.response.CoreAdminResponse;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.LukeResponse;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.RangeFacet;
import org.apache.solr.client.solrj.response.SolrPingResponse;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.luke.FieldFlag;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.CoreAdminParams.CoreAdminAction;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.ShardParams;
import org.apache.solr.common.util.NamedList;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DCDate;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.DSpaceObjectLegacySupportService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.service.ClientInfoService;
import org.dspace.services.ConfigurationService;
import org.dspace.statistics.service.SolrLoggerService;
import org.dspace.statistics.util.LocationUtils;
import org.dspace.statistics.util.SpiderDetector;
import org.dspace.usage.UsageWorkflowEvent;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Static holder for a HttpSolrClient connection pool to issue
* usage logging events to Solr from DSpace libraries, and some static query
* composers.
*
* @author ben at atmire.com
* @author kevinvandevelde at atmire.com
* @author mdiggory at atmire.com
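*
* <p>Typical use (a sketch; the {@code StatisticsServiceFactory} accessor and the
* {@code item}/{@code request}/{@code context} variables shown here are assumptions,
* not part of this class):</p>
* <pre>
* SolrLoggerService logger = StatisticsServiceFactory.getInstance().getSolrLoggerService();
* logger.postView(item, request, context.getCurrentUser());
* </pre>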
*/
public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBean {
private static final Logger log = LogManager.getLogger();
private static final String MULTIPLE_VALUES_SPLITTER = "|";
protected SolrClient solr;
public static final String DATE_FORMAT_8601 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
public static final String DATE_FORMAT_DCDATE = "yyyy-MM-dd'T'HH:mm:ss'Z'";
protected DatabaseReader locationService;
protected boolean useProxies;
private static final List<String> statisticYearCores = new ArrayList<>();
private static boolean statisticYearCoresInit = false;
private static final String IP_V4_REGEX = "^((?:\\d{1,3}\\.){3})\\d{1,3}$";
private static final String IP_V6_REGEX = "^(.*):.*:.*$";
@Autowired(required = true)
protected BitstreamService bitstreamService;
@Autowired(required = true)
protected ContentServiceFactory contentServiceFactory;
@Autowired(required = true)
private ConfigurationService configurationService;
@Autowired(required = true)
private ClientInfoService clientInfoService;
@Autowired
private SolrStatisticsCore solrStatisticsCore;
/** URL to the current-year statistics core. Prior-year shards will have a year suffixed. */
private String statisticsCoreURL;
/** Name of the current-year statistics core. Prior-year shards will have a year suffixed. */
private String statisticsCoreBase;
public static enum StatisticsType {
VIEW("view"),
SEARCH("search"),
SEARCH_RESULT("search_result"),
WORKFLOW("workflow");
private final String text;
StatisticsType(String text) {
this.text = text;
}
public String text() {
return text;
}
}
protected SolrLoggerServiceImpl() {
}
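/**
* Spring initialization hook: obtains the shared statistics {@link SolrClient} and, when
* "usage-statistics.dbfile" is configured, opens the MaxMind GeoIP2 database used for
* location-based reporting.
*/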
@Override
public void afterPropertiesSet() throws Exception {
solr = solrStatisticsCore.getSolr();
// Read in the file so we don't have to do it all the time
//spiderIps = SpiderDetector.getSpiderIpAddresses();
DatabaseReader service = null;
// Get the db file for the location
String dbPath = configurationService.getProperty("usage-statistics.dbfile");
if (dbPath != null) {
try {
File dbFile = new File(dbPath);
service = new DatabaseReader.Builder(dbFile).build();
} catch (FileNotFoundException fe) {
log.error(
"The GeoLite Database file is missing (" + dbPath + ")! Solr Statistics cannot generate location " +
"based reports! Please see the DSpace installation instructions for instructions to install " +
"this file.",
fe);
} catch (IOException e) {
log.error(
"Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the " +
"DSpace installation instructions for more details.",
e);
}
} else {
log.error("The required 'dbfile' configuration is missing in solr-statistics.cfg!");
}
locationService = service;
}
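/**
* Record a view event for a DSpace object; simply delegates to
* {@link #postView(DSpaceObject, HttpServletRequest, EPerson)}.
*/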
@Override
public void post(DSpaceObject dspaceObject, HttpServletRequest request,
EPerson currentUser) {
postView(dspaceObject, request, currentUser);
}
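/**
* Store a VIEW event for the given object in the statistics core. Does nothing when the
* Solr client or the GeoIP database reader is unavailable.
*/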
@Override
public void postView(DSpaceObject dspaceObject, HttpServletRequest request,
EPerson currentUser) {
if (solr == null || locationService == null) {
return;
}
initSolrYearCores();
try {
SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, request, currentUser);
if (doc1 == null) {
return;
}
if (dspaceObject instanceof Bitstream) {
Bitstream bit = (Bitstream) dspaceObject;
List<Bundle> bundles = bit.getBundles();
for (Bundle bundle : bundles) {
doc1.addField("bundleName", bundle.getName());
}
}
doc1.addField("statistics_type", StatisticsType.VIEW.text());
solr.add(doc1);
// commits are executed automatically using the solr autocommit
boolean useAutoCommit = configurationService.getBooleanProperty("solr-statistics.autoCommit", true);
if (!useAutoCommit) {
solr.commit(false, false);
}
} catch (RuntimeException re) {
throw re;
} catch (Exception e) {
String email = null == currentUser ? "[anonymous]" : currentUser.getEmail();
log.error("Error saving VIEW event to Solr for DSpaceObject {} by EPerson {}",
dspaceObject.getID(), email, e);
}
}
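/**
* Variant of postView for callers without an HttpServletRequest, taking the client IP,
* user agent and X-Forwarded-For header value directly.
*/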
@Override
public void postView(DSpaceObject dspaceObject,
String ip, String userAgent, String xforwardedfor, EPerson currentUser) {
if (solr == null || locationService == null) {
return;
}
initSolrYearCores();
try {
SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, ip, userAgent, xforwardedfor,
currentUser);
if (doc1 == null) {
return;
}
if (dspaceObject instanceof Bitstream) {
Bitstream bit = (Bitstream) dspaceObject;
List<Bundle> bundles = bit.getBundles();
for (Bundle bundle : bundles) {
doc1.addField("bundleName", bundle.getName());
}
}
doc1.addField("statistics_type", StatisticsType.VIEW.text());
solr.add(doc1);
// commits are executed automatically using the solr autocommit
boolean useAutoCommit = configurationService.getBooleanProperty("solr-statistics.autoCommit", true);
if (!useAutoCommit) {
solr.commit(false, false);
}
} catch (RuntimeException re) {
throw re;
} catch (Exception e) {
log.error("Error saving VIEW event to Solr for DSpaceObject {} by EPerson {}",
dspaceObject.getID(), currentUser.getEmail(), e);
}
}
/**
* Returns a Solr input document containing the information common to all statistics
* events, regardless of whether we are logging a search or a view of a DSpace object.
*
* @param dspaceObject the object used.
* @param request the current request context.
* @param currentUser the current session's user.
* @return a solr input document
* @throws SQLException in case of a database exception
*/
protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServletRequest request,
EPerson currentUser) throws SQLException {
boolean isSpiderBot = request != null && SpiderDetector.isSpider(request);
if (isSpiderBot &&
!configurationService.getBooleanProperty("usage-statistics.logBots", true)) {
return null;
}
SolrInputDocument doc1 = new SolrInputDocument();
// Save our basic info that we already have
if (request != null) {
String ip = clientInfoService.getClientIp(request);
if (configurationService.getBooleanProperty("anonymize_statistics.anonymize_on_log", false)) {
try {
doc1.addField("ip", anonymizeIp(ip));
} catch (UnknownHostException e) {
log.warn(e.getMessage(), e);
}
} else {
doc1.addField("ip", ip);
}
//Also store the referrer
if (request.getHeader("referer") != null) {
doc1.addField("referrer", request.getHeader("referer"));
}
InetAddress ipAddress = null;
try {
String dns;
if (!configurationService.getBooleanProperty("anonymize_statistics.anonymize_on_log", false)) {
ipAddress = InetAddress.getByName(ip);
dns = ipAddress.getHostName();
} else {
dns = configurationService.getProperty("anonymize_statistics.dns_mask", "anonymized");
}
doc1.addField("dns", dns.toLowerCase(Locale.ROOT));
} catch (UnknownHostException e) {
log.info("Failed DNS Lookup for IP: {}", ip);
log.debug(e.getMessage(), e);
}
if (request.getHeader("User-Agent") != null) {
doc1.addField("userAgent", request.getHeader("User-Agent"));
}
doc1.addField("isBot", isSpiderBot);
// Save the location information if valid, save the event without
// location information if not valid
if (locationService != null && ipAddress != null) {
try {
CityResponse location = locationService.city(ipAddress);
String countryCode = location.getCountry().getIsoCode();
double latitude = location.getLocation().getLatitude();
double longitude = location.getLocation().getLongitude();
if (!(
"--".equals(countryCode)
&& latitude == -180
&& longitude == -180)
) {
try {
doc1.addField("continent", LocationUtils
.getContinentCode(countryCode));
} catch (Exception e) {
log.warn("Failed to load country/continent table: {}", countryCode);
}
doc1.addField("countryCode", countryCode);
doc1.addField("city", location.getCity().getName());
doc1.addField("latitude", latitude);
doc1.addField("longitude", longitude);
}
} catch (IOException e) {
log.warn("GeoIP lookup failed.", e);
} catch (GeoIp2Exception e) {
log.info("Unable to get location of request: {}", e.getMessage());
}
}
}
if (dspaceObject != null) {
doc1.addField("id", dspaceObject.getID().toString());
doc1.addField("type", dspaceObject.getType());
storeParents(doc1, dspaceObject);
}
// Save the current time
doc1.addField("time", DateFormatUtils.format(new Date(), DATE_FORMAT_8601));
if (currentUser != null) {
doc1.addField("epersonid", currentUser.getID().toString());
}
return doc1;
}
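/**
* Request-less counterpart of {@link #getCommonSolrDoc(DSpaceObject, HttpServletRequest, EPerson)}:
* builds the same common fields (ip, dns, userAgent, isBot, location, object id/type, time,
* epersonid) from an explicit IP address, user agent and X-Forwarded-For value.
*/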
protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, String ip, String userAgent,
String xforwardedfor, EPerson currentUser) throws SQLException {
boolean isSpiderBot = SpiderDetector.isSpider(ip);
if (isSpiderBot &&
!configurationService.getBooleanProperty("usage-statistics.logBots", true)) {
return null;
}
SolrInputDocument doc1 = new SolrInputDocument();
// Save our basic info that we already have
ip = clientInfoService.getClientIp(ip, xforwardedfor);
if (configurationService.getBooleanProperty("anonymize_statistics.anonymize_on_log", false)) {
try {
doc1.addField("ip", anonymizeIp(ip));
} catch (UnknownHostException e) {
log.warn(e.getMessage(), e);
}
} else {
doc1.addField("ip", ip);
}
InetAddress ipAddress = null;
try {
String dns;
if (!configurationService.getBooleanProperty("anonymize_statistics.anonymize_on_log", false)) {
ipAddress = InetAddress.getByName(ip);
dns = ipAddress.getHostName();
} else {
dns = configurationService.getProperty("anonymize_statistics.dns_mask", "anonymized");
}
doc1.addField("dns", dns.toLowerCase(Locale.ROOT));
} catch (UnknownHostException e) {
log.info("Failed DNS Lookup for IP: {}", ip);
log.debug(e.getMessage(), e);
}
if (userAgent != null) {
doc1.addField("userAgent", userAgent);
}
doc1.addField("isBot", isSpiderBot);
// Save the location information if valid, save the event without
// location information if not valid
if (locationService != null && ipAddress != null) {
try {
CityResponse location = locationService.city(ipAddress);
String countryCode = location.getCountry().getIsoCode();
double latitude = location.getLocation().getLatitude();
double longitude = location.getLocation().getLongitude();
if (!(
"--".equals(countryCode)
&& latitude == -180
&& longitude == -180)
) {
try {
doc1.addField("continent", LocationUtils
.getContinentCode(countryCode));
} catch (Exception e) {
log.warn("Failed to load country/continent table: {}", countryCode);
}
doc1.addField("countryCode", countryCode);
doc1.addField("city", location.getCity().getName());
doc1.addField("latitude", latitude);
doc1.addField("longitude", longitude);
}
} catch (IOException e) {
log.warn("GeoIP lookup failed.", e);
} catch (GeoIp2Exception e) {
log.info("Unable to get location of request: {}", e.getMessage());
}
}
if (dspaceObject != null) {
doc1.addField("id", dspaceObject.getID().toString());
doc1.addField("type", dspaceObject.getType());
storeParents(doc1, dspaceObject);
}
// Save the current time
doc1.addField("time", DateFormatUtils.format(new Date(), DATE_FORMAT_8601));
if (currentUser != null) {
doc1.addField("epersonid", currentUser.getID().toString());
}
return doc1;
}
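/**
* Store a SEARCH event (or SEARCH_RESULT when a result object is supplied), together with
* the queries, scope, results-per-page, sort and page parameters that were used.
*/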
@Override
public void postSearch(DSpaceObject resultObject, HttpServletRequest request, EPerson currentUser,
List<String> queries, int rpp, String sortBy, String order, int page, DSpaceObject scope) {
try {
SolrInputDocument solrDoc = getCommonSolrDoc(resultObject, request, currentUser);
if (solrDoc == null) {
return;
}
initSolrYearCores();
for (String query : queries) {
solrDoc.addField("query", query);
}
if (resultObject != null) {
//We have a search result
solrDoc.addField("statistics_type", StatisticsType.SEARCH_RESULT.text());
} else {
solrDoc.addField("statistics_type", StatisticsType.SEARCH.text());
}
//Store the scope
if (scope != null) {
solrDoc.addField("scopeId", scope.getID().toString());
solrDoc.addField("scopeType", scope.getType());
}
if (rpp != -1) {
solrDoc.addField("rpp", rpp);
}
if (sortBy != null) {
solrDoc.addField("sortBy", sortBy);
if (order != null) {
solrDoc.addField("sortOrder", order);
}
}
if (page != -1) {
solrDoc.addField("page", page);
}
solr.add(solrDoc);
} catch (RuntimeException re) {
throw re;
} catch (Exception e) {
log.error("Error saving SEARCH event to Solr by EPerson {}",
currentUser.getEmail(), e);
}
}
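/**
* Store a WORKFLOW event. Failures are logged and swallowed so that statistics logging can
* never interrupt the workflow itself.
*/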
@Override
public void postWorkflow(UsageWorkflowEvent usageWorkflowEvent) throws SQLException {
initSolrYearCores();
try {
SolrInputDocument solrDoc = getCommonSolrDoc(usageWorkflowEvent.getObject(), null, null);
//Log the current collection & the scope !
solrDoc.addField("owningColl", usageWorkflowEvent.getScope().getID().toString());
storeParents(solrDoc, usageWorkflowEvent.getScope());
if (usageWorkflowEvent.getWorkflowStep() != null) {
solrDoc.addField("workflowStep", usageWorkflowEvent.getWorkflowStep());
}
if (usageWorkflowEvent.getOldState() != null) {
solrDoc.addField("previousWorkflowStep", usageWorkflowEvent.getOldState());
}
if (usageWorkflowEvent.getGroupOwners() != null) {
for (int i = 0; i < usageWorkflowEvent.getGroupOwners().length; i++) {
Group group = usageWorkflowEvent.getGroupOwners()[i];
solrDoc.addField("owner", "g" + group.getID().toString());
}
}
if (usageWorkflowEvent.getEpersonOwners() != null) {
for (int i = 0; i < usageWorkflowEvent.getEpersonOwners().length; i++) {
EPerson ePerson = usageWorkflowEvent.getEpersonOwners()[i];
solrDoc.addField("owner", "e" + ePerson.getID().toString());
}
}
solrDoc.addField("workflowItemId", usageWorkflowEvent.getWorkflowItem().getID().toString());
EPerson submitter = ((Item) usageWorkflowEvent.getObject()).getSubmitter();
if (submitter != null) {
solrDoc.addField("submitter", submitter.getID().toString());
}
solrDoc.addField("statistics_type", StatisticsType.WORKFLOW.text());
if (usageWorkflowEvent.getActor() != null) {
solrDoc.addField("actor", usageWorkflowEvent.getActor().getID().toString());
}
solr.add(solrDoc);
} catch (Exception e) {
//Log the exception, no need to send it through, the workflow shouldn't crash because of this !
log.error("Error saving WORKFLOW event to Solr", e);
}
}
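/**
* Walk up the object hierarchy and add the identifiers of the owning communities,
* collections and items (owningComm / owningColl / owningItem) to the document.
*/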
@Override
public void storeParents(SolrInputDocument doc1, DSpaceObject dso)
throws SQLException {
if (dso instanceof Community) {
Community comm = (Community) dso;
List<Community> parentCommunities = comm.getParentCommunities();
for (Community parent : parentCommunities) {
doc1.addField("owningComm", parent.getID().toString());
storeParents(doc1, parent);
}
} else if (dso instanceof Collection) {
Collection coll = (Collection) dso;
List<Community> communities = coll.getCommunities();
for (Community community : communities) {
doc1.addField("owningComm", community.getID().toString());
storeParents(doc1, community);
}
} else if (dso instanceof Item) {
Item item = (Item) dso;
List<Collection> collections = item.getCollections();
for (Collection collection : collections) {
doc1.addField("owningColl", collection.getID().toString());
storeParents(doc1, collection);
}
} else if (dso instanceof Bitstream) {
Bitstream bitstream = (Bitstream) dso;
List<Bundle> bundles = bitstream.getBundles();
for (Bundle bundle : bundles) {
List<Item> items = bundle.getItems();
for (Item item : items) {
doc1.addField("owningItem", item.getID().toString());
storeParents(doc1, item);
}
}
}
}
@Override
public boolean isUseProxies() {
return clientInfoService.isUseProxiesEnabled();
}
@Override
public void removeIndex(String query) throws IOException,
SolrServerException {
solr.deleteByQuery(query);
solr.commit();
}
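/**
* Probe for documents matching the given query. In this implementation only a single-row
* query is issued and the returned map is always left empty.
*/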
@Override
public Map<String, List<String>> queryField(String query,
List oldFieldVals, String field)
throws IOException {
Map<String, List<String>> currentValsStored = new HashMap<>();
try {
// Get one document (since all the metadata for all the values
// should be the same, just get the first one we find)
Map<String, String> params = new HashMap<>();
params.put("q", query);
params.put("rows", "1");
MapSolrParams solrParams = new MapSolrParams(params);
QueryResponse response = solr.query(solrParams);
// Make sure we at least got a document
if (response.getResults().getNumFound() == 0) {
return currentValsStored;
}
} catch (SolrServerException e) {
log.error(e.getMessage(), e);
}
return currentValsStored;
}
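/**
* Helper that pages through the results of a statistics query ten documents at a time
* (across the yearly shards when present) and hands each document to
* {@link #process(SolrInputDocument)}.
*/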
public class ResultProcessor {
private SolrInputDocument toSolrInputDocument(SolrDocument d) {
SolrInputDocument doc = new SolrInputDocument();
for (String name : d.getFieldNames()) {
doc.addField(name, d.getFieldValue(name));
}
return doc;
}
public void execute(String query) throws SolrServerException, IOException {
Map<String, String> params = new HashMap<>();
params.put("q", query);
params.put("rows", "10");
if (0 < statisticYearCores.size()) {
params.put(ShardParams.SHARDS, StringUtils.join(statisticYearCores.iterator(), ','));
}
MapSolrParams solrParams = new MapSolrParams(params);
QueryResponse response = solr.query(solrParams);
SolrDocumentList results = response.getResults();
long numbFound = results.getNumFound();
// process the first batch
for (SolrDocument result : results) {
process(toSolrInputDocument(result));
}
// Run over the rest
for (int i = 10; i < numbFound; i += 10) {
params.put("start", String.valueOf(i));
solrParams = new MapSolrParams(params);
response = solr.query(solrParams);
results = response.getResults();
for (SolrDocument result : results) {
process(toSolrInputDocument(result));
}
}
}
public void commit() throws IOException, SolrServerException {
solr.commit();
}
/**
* Override to manage pages of documents
*
* @param docs a list of Solr documents
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
* @throws SolrServerException Exception from the Solr server to the solrj Java client.
*/
public void process(List<SolrInputDocument> docs) throws IOException, SolrServerException {
for (SolrInputDocument doc : docs) {
process(doc);
}
}
/**
* Override to manage individual documents
*
* @param doc Solr document
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
* @throws SolrServerException Exception from the Solr server to the solrj Java client.
*/
public void process(SolrInputDocument doc) throws IOException, SolrServerException {
}
}
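/**
* Flag existing documents as bot traffic: for every known spider IP address, matching
* documents that are not already marked are re-added with isBot set to true.
*/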
@Override
public void markRobotsByIP() {
for (String ip : SpiderDetector.getSpiderIpAddresses()) {
try {
/* Result processor that alters each matching record so it is identified as a bot */
ResultProcessor processor = new ResultProcessor() {
@Override
public void process(SolrInputDocument doc) throws IOException, SolrServerException {
doc.removeField("isBot");
doc.addField("isBot", true);
solr.add(doc);
log.info("Marked " + doc.getFieldValue("ip") + " as bot");
}
};
/* query for ip, exclude results previously set as bots. */
processor.execute("ip:" + ip + "* AND -isBot:true");
solr.commit();
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
}
@Override
public void markRobotByUserAgent(String agent) {
try {
/* Result processor that alters each matching record so it is identified as a bot */
ResultProcessor processor = new ResultProcessor() {
@Override
public void process(SolrInputDocument doc) throws IOException, SolrServerException {
doc.removeField("isBot");
doc.addField("isBot", true);
solr.add(doc);
}
};
/* query for user agent, exclude results previously set as bots. */
processor.execute("userAgent:" + agent + " AND -isBot:true");
solr.commit();
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
@Override
public void deleteRobotsByIsBotFlag() {
try {
solr.deleteByQuery("isBot:true");
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
@Override
public void deleteIP(String ip) {
try {
solr.deleteByQuery("ip:" + ip + "*");
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
@Override
public void deleteRobotsByIP() {
for (String ip : SpiderDetector.getSpiderIpAddresses()) {
deleteIP(ip);
}
}
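/**
* Emulate an update: because these documents are not updated in place, every document
* matching the query is collected, deleted by uid and re-added with the requested field
* changes applied.
*/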
@Override
public void update(String query, String action,
List<String> fieldNames, List<List<Object>> fieldValuesList)
throws SolrServerException, IOException {
update(query, action, fieldNames, fieldValuesList, true);
}
@Override
public void update(String query, String action,
List<String> fieldNames, List<List<Object>> fieldValuesList, boolean commit)
throws SolrServerException, IOException {
// Since there is NO update
// We need to get our documents
// QueryResponse queryResponse = solr.query()//query(query, null, -1,
// null, null, null);
List<SolrInputDocument> docsToUpdate = new ArrayList<>();
ResultProcessor processor = new ResultProcessor() {
@Override
public void process(SolrInputDocument document) {
docsToUpdate.add(document);
}
};
processor.execute(query);
// Add the new (updated) ones
for (int i = 0; i < docsToUpdate.size(); i++) {
SolrInputDocument solrDocument = docsToUpdate.get(i);
// Delete the document from the solr client
solr.deleteByQuery("uid:" + solrDocument.getFieldValue("uid"));
// Now loop over our fieldname actions
for (int j = 0; j < fieldNames.size(); j++) {
String fieldName = fieldNames.get(j);
List