All Downloads are FREE. Search and download functionalities are using the official Maven repository.

com.jaeksoft.searchlib.crawler.file.database.FileCrawlQueue Maven / Gradle / Ivy

Go to download

OpenSearchServer is a powerful, enterprise-class, search engine program. Using the web user interface, the crawlers (web, file, database, ...) and the REST/RESTful API you will be able to integrate quickly and easily advanced full-text search capabilities in your application. OpenSearchServer runs on Windows and Linux/Unix/BSD.

The newest version!
/**   
 * License Agreement for OpenSearchServer
 *
 * Copyright (C) 2008-2013 Emmanuel Keller / Jaeksoft
 * 
 * http://www.open-search-server.com
 * 
 * This file is part of OpenSearchServer.
 *
 * OpenSearchServer is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 * OpenSearchServer is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with OpenSearchServer. 
 *  If not, see <http://www.gnu.org/licenses/>.
 **/

package com.jaeksoft.searchlib.crawler.file.database;

import java.io.IOException;
import java.net.URISyntaxException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;

import org.apache.http.HttpException;

import com.jaeksoft.searchlib.SearchLibException;
import com.jaeksoft.searchlib.config.Config;
import com.jaeksoft.searchlib.crawler.common.process.CrawlQueueAbstract;
import com.jaeksoft.searchlib.crawler.common.process.CrawlStatistics;
import com.jaeksoft.searchlib.crawler.file.spider.CrawlFile;
import com.jaeksoft.searchlib.util.ReadWriteLock;

/**
 * Crawl queue for the file crawler: buffers crawled files to index and URIs to
 * delete, then flushes both batches to the {@link FileManager} when the queue
 * framework ({@link CrawlQueueAbstract}) decides to persist.
 */
public class FileCrawlQueue extends CrawlQueueAbstract {

	/** Guards the pending buffers and the working snapshots below. */
	final private ReadWriteLock rwl = new ReadWriteLock();

	// Pending buffers, filled by add()/delete() and swapped out by initWorking().
	private List<CrawlFile> updateCrawlList;
	private List<String> deleteUriList;

	// Snapshots being indexed; non-null only while an index pass is in progress.
	private List<CrawlFile> workingUpdateCrawlList;
	private List<String> workingDeleteUriList;

	public FileCrawlQueue(Config config) {
		super(config);
		this.updateCrawlList = new ArrayList<CrawlFile>(0);
		this.deleteUriList = new ArrayList<String>(0);
	}

	/**
	 * Enqueues a crawled file for indexing and bumps the pending-update counter.
	 *
	 * @param crawlStats statistics of the current crawl session
	 * @param crawl the crawl result to index
	 */
	public void add(CrawlStatistics crawlStats, CrawlFile crawl)
			throws NoSuchAlgorithmException, IOException, SearchLibException {
		// Write lock: ArrayList is not thread-safe, so concurrent add() calls
		// must be serialized. The original code took the shared read lock here,
		// which permits concurrent structural mutation of the list.
		rwl.w.lock();
		try {
			updateCrawlList.add(crawl);
			crawlStats.incPendingUpdateCount();
		} finally {
			rwl.w.unlock();
		}
	}

	/**
	 * Enqueues a URI for deletion from the index and bumps the pending-delete
	 * counter.
	 *
	 * @param crawlStats statistics of the current crawl session
	 * @param uri the URI to delete
	 */
	public void delete(CrawlStatistics crawlStats, String uri) {
		// Write lock for the same reason as add(): list mutation must be exclusive.
		rwl.w.lock();
		try {
			deleteUriList.add(uri);
			crawlStats.incPendingDeleteCount();
		} finally {
			rwl.w.unlock();
		}
	}

	/**
	 * @return true if either pending buffer has reached the maximum buffer size
	 *         and should be flushed to the index
	 */
	@Override
	protected boolean shouldWePersist() {
		rwl.r.lock();
		try {
			return updateCrawlList.size() >= getMaxBufferSize()
					|| deleteUriList.size() >= getMaxBufferSize();
		} finally {
			rwl.r.unlock();
		}
	}

	/**
	 * @return true if a working snapshot from a previous initWorking() has not
	 *         been released by resetWork() yet
	 */
	@Override
	protected boolean workingInProgress() {
		rwl.r.lock();
		try {
			return workingUpdateCrawlList != null || workingDeleteUriList != null;
		} finally {
			rwl.r.unlock();
		}
	}

	/**
	 * Atomically moves the pending buffers into the working snapshots, installs
	 * fresh empty buffers, and resets the session's pending counters.
	 */
	@Override
	protected void initWorking() {
		rwl.w.lock();
		try {
			workingUpdateCrawlList = updateCrawlList;
			workingDeleteUriList = deleteUriList;

			updateCrawlList = new ArrayList<CrawlFile>(0);
			deleteUriList = new ArrayList<String>(0);

			CrawlStatistics sessionStats = getSessionStats();
			if (sessionStats != null)
				sessionStats.resetPending();
		} finally {
			rwl.w.unlock();
		}
	}

	/** Releases the working snapshots once the batch has been processed. */
	@Override
	protected void resetWork() {
		rwl.w.lock();
		try {
			workingUpdateCrawlList = null;
			workingDeleteUriList = null;
		} finally {
			rwl.w.unlock();
		}
	}

	/** Flushes the working snapshots to the index: deletions first, then updates. */
	@Override
	protected void indexWork() throws SearchLibException, IOException, URISyntaxException, InstantiationException,
			IllegalAccessException, ClassNotFoundException, HttpException {
		CrawlStatistics sessionStats = getSessionStats();
		deleteCollection(workingDeleteUriList, sessionStats);
		updateCrawls(workingUpdateCrawlList, sessionStats);
	}

	/**
	 * Sends a batch of crawled files to the FileManager for indexing.
	 *
	 * @param workUpdateCrawlList snapshot of crawls to index; may be null or empty
	 * @param sessionStats session statistics to update; may be null
	 * @return true if a non-empty batch was indexed
	 */
	protected boolean updateCrawls(List<CrawlFile> workUpdateCrawlList, CrawlStatistics sessionStats)
			throws SearchLibException {
		if (workUpdateCrawlList == null || workUpdateCrawlList.isEmpty())
			return false;

		FileManager manager = getConfig().getFileManager();
		manager.updateCrawlTarget(workUpdateCrawlList, getMaxBufferSize());
		manager.updateCrawlUriDb(workUpdateCrawlList);

		if (sessionStats != null)
			sessionStats.addUpdatedCount(workUpdateCrawlList.size());
		setContainedData();
		return true;
	}

	/**
	 * Deletes a batch of URIs from the index.
	 *
	 * @param workDeleteUriList snapshot of URIs to delete; may be null or empty
	 * @param sessionStats session statistics to update; may be null
	 * @return true if a non-empty batch was submitted for deletion
	 */
	protected boolean deleteCollection(List<String> workDeleteUriList, CrawlStatistics sessionStats)
			throws SearchLibException {
		if (workDeleteUriList == null || workDeleteUriList.isEmpty())
			return false;

		FileManager manager = getConfig().getFileManager();
		// NOTE(review): deleteByUri() apparently returns a boolean, so the stats
		// record at most one deletion per batch regardless of the list size —
		// confirm against FileManager whether a per-URI count is available.
		int nbFilesDeleted = manager.deleteByUri(workDeleteUriList) ? 1 : 0;
		if (sessionStats != null)
			sessionStats.addDeletedCount(nbFilesDeleted);
		setContainedData();
		return true;
	}

}




© 2015 - 2024 Weber Informatics LLC | Privacy Policy