
com.jaeksoft.searchlib.crawler.web.robotstxt.DisallowList


OpenSearchServer is a powerful, enterprise-class search engine. Using the web user interface, the crawlers (web, file, database, ...) and the REST/RESTful API, you can quickly and easily integrate advanced full-text search capabilities into your application. OpenSearchServer runs on Windows and Linux/Unix/BSD.

/**   
 * License Agreement for OpenSearchServer
 *
 * Copyright (C) 2008-2012 Emmanuel Keller / Jaeksoft
 * 
 * http://www.open-search-server.com
 * 
 * This file is part of OpenSearchServer.
 *
 * OpenSearchServer is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 * OpenSearchServer is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with OpenSearchServer.
 *  If not, see <http://www.gnu.org/licenses/>.
 **/

package com.jaeksoft.searchlib.crawler.web.robotstxt;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.TreeMap;

import com.jaeksoft.searchlib.analysis.LanguageEnum;
import com.jaeksoft.searchlib.parser.Parser;
import com.jaeksoft.searchlib.streamlimiter.StreamLimiter;

/**
 * Contains the list of "Disallow" clauses of a "robots.txt" file, grouped by
 * "User-agent".
 * 
 * @author ekeller
 * 
 */
public class DisallowList extends Parser {

	private Map<String, DisallowSet> list;

	public DisallowList() {
		super(null, false);
		list = null;
	}

	public Map<String, DisallowSet> getMap() {
		return list;
	}

	/**
	 * Returns the DisallowSet matching the User-agent given as parameter.
	 * 
	 * @param userAgent
	 *            the User-agent to look up
	 * @return the matching DisallowSet, or null if none exists
	 */
	protected DisallowSet get(String userAgent) {
		synchronized (this) {
			if (list == null)
				return null;
			return list.get(userAgent);
		}
	}

	/**
	 * Returns the DisallowSet matching the User-agent parameter, creating a
	 * new one if none exists yet.
	 * 
	 * @param userAgent
	 *            the User-agent to look up
	 * @return the existing or newly created DisallowSet
	 */
	protected DisallowSet getOrCreate(String userAgent) {
		synchronized (this) {
			if (list == null)
				list = new TreeMap<String, DisallowSet>();
			DisallowSet disallowSet = list.get(userAgent);
			if (disallowSet == null) {
				disallowSet = new DisallowSet(userAgent);
				list.put(userAgent, disallowSet);
			}
			return disallowSet;
		}
	}

	/**
	 * Extracts the "Disallow" directives from the robots.txt content.
	 * 
	 * @param streamLimiter
	 *            gives access to the robots.txt content
	 * @param lang
	 *            the language (unused here)
	 * @throws IOException
	 */
	@Override
	public void parseContent(StreamLimiter streamLimiter, LanguageEnum lang)
			throws IOException {
		BufferedReader br = new BufferedReader(new InputStreamReader(
				streamLimiter.getNewInputStream()));
		String line;
		DisallowSet currentDisallowSet = null;
		while ((line = br.readLine()) != null) {
			line = line.trim();
			if (line.startsWith("#"))
				continue;
			if (line.length() == 0)
				continue;
			StringTokenizer st = new StringTokenizer(line, ":");
			if (!st.hasMoreTokens())
				continue;
			String key = st.nextToken().trim();
			String value = null;
			if (!st.hasMoreTokens())
				continue;
			value = st.nextToken().trim();
			if ("User-agent".equalsIgnoreCase(key)) {
				currentDisallowSet = getOrCreate(value.toLowerCase());
			} else if ("Disallow".equalsIgnoreCase(key)) {
				if (currentDisallowSet != null)
					currentDisallowSet.add(value);
			}
		}
		br.close();
	}
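	/*
	 * Example (illustrative): for a robots.txt such as
	 *
	 *   User-agent: *
	 *   Disallow: /private
	 *
	 *   User-agent: googlebot
	 *   Disallow: /search
	 *
	 * parseContent() keys the map with the lowercased user-agents "*" and
	 * "googlebot", each mapped to a DisallowSet holding the corresponding
	 * "Disallow" values ("/private" and "/search").
	 */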

	public long size() {
		if (list == null)
			return 0;
		return list.size();
	}

}
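Outside of this source file, the same grouping logic can be exercised with plain JDK classes. Below is a minimal, self-contained sketch of the approach; the RobotsTxtDemo class name and the inline robots.txt sample are illustrative and not part of OpenSearchServer.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TreeMap;
import java.util.TreeSet;

public class RobotsTxtDemo {

	public static void main(String[] args) throws IOException {
		// Illustrative sample; in OpenSearchServer the content comes from a StreamLimiter.
		String robotsTxt = "User-agent: *\n"
				+ "Disallow: /private\n"
				+ "\n"
				+ "User-agent: googlebot\n"
				+ "Disallow: /search\n";

		// Group "Disallow" values by lowercased user-agent, mirroring DisallowList.parseContent().
		Map<String, Set<String>> disallowByAgent = new TreeMap<String, Set<String>>();
		BufferedReader br = new BufferedReader(new StringReader(robotsTxt));
		String line;
		String currentAgent = null;
		while ((line = br.readLine()) != null) {
			line = line.trim();
			if (line.length() == 0 || line.startsWith("#"))
				continue;
			// Like the original, only the first ":"-separated token after the key is kept.
			StringTokenizer st = new StringTokenizer(line, ":");
			if (!st.hasMoreTokens())
				continue;
			String key = st.nextToken().trim();
			if (!st.hasMoreTokens())
				continue;
			String value = st.nextToken().trim();
			if ("User-agent".equalsIgnoreCase(key)) {
				currentAgent = value.toLowerCase();
				Set<String> set = disallowByAgent.get(currentAgent);
				if (set == null) {
					set = new TreeSet<String>();
					disallowByAgent.put(currentAgent, set);
				}
			} else if ("Disallow".equalsIgnoreCase(key) && currentAgent != null) {
				disallowByAgent.get(currentAgent).add(value);
			}
		}
		br.close();

		// Prints: {*=[/private], googlebot=[/search]}
		System.out.println(disallowByAgent);
	}
}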