/*
 * Copyright (2013) Fondazione Bruno Kessler (http://www.fbk.eu/)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package eu.fbk.twm.wiki.xmldump;

import eu.fbk.twm.utils.*;
import org.apache.commons.cli.*;
import org.apache.commons.cli.OptionBuilder;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;

import java.io.*;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Date;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Extracts the media files referenced by Wikipedia content pages. For every
 * file link found in a page it writes a tab-separated record with the page
 * title and the location of the file: the local language code if the file
 * exists in the local Wikipedia, the commons label otherwise, followed by the
 * hash-based directory and the file name.
 *
 * Created by giuliano on 2/21/13.
 */
public class WikipediaFileSourceExtractor extends AbstractWikipediaExtractor implements WikipediaExtractor {
	/**
	 * Define a static logger variable so that it references the
	 * Logger instance named WikipediaFileSourceExtractor.
	 */
	static Logger logger = Logger.getLogger(WikipediaFileSourceExtractor.class.getName());

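	/**
	 * Writer for the extracted page-title/file-location records.
	 */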
	private PrintWriter fileSourceWriter;

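	/**
	 * Location label used when a file is not found in the local Wikipedia and
	 * is therefore assumed to be hosted on Wikimedia Commons.
	 */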
	public static final String COMMONS_LABEL = "commons";

	//private PageSet fileNameSet;
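	/**
	 * Titles (without the namespace prefix) of the file pages available in the
	 * local Wikipedia, loaded by {@link #read(File)}.
	 */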
	private Set<String> fileNameSet;

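	/**
	 * Maps redirected file titles to their redirect targets.
	 */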
	private PageMap redirectPageMap;

	public final static Pattern pipePattern = Pattern.compile("\\|");

	public WikipediaFileSourceExtractor(int numThreads, int numPages, Locale locale) {
		super(numThreads, numPages, locale);
	}

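	/**
	 * Loads the redirect map and the set of file-page titles from the files
	 * named by {@code extractorParameters}, opens the UTF-8 output writer and
	 * then starts processing the XML dump.
	 */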
	@Override
	public void start(ExtractorParameters extractorParameters) {
		try {

			redirectPageMap = new PageMap(new File(extractorParameters.getWikipediaRedirFileName()));
			logger.info(redirectPageMap.size() + " redirect pages");

			fileSourceWriter = new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(extractorParameters.getWikipediaFileSourceName()), "UTF-8")));

			//fileNameSet = new PageSet(new File(extractorParameters.getWikipediaFileName()));
			fileNameSet = read(new File(extractorParameters.getWikipediaFileName()));
			logger.info(fileNameSet.size() + " file pages");

		} catch (IOException e) {
			logger.error(e);
		}
		startProcess(extractorParameters.getWikipediaXmlFileName());
	}

	@Override
	public void filePage(String text, String title, int wikiID) {
		//logger.debug(title);
		// file pages are not processed here; their titles are loaded in start()
	}

	@Override
	public void disambiguationPage(String text, String title, int wikiID) {
		// not used by this extractor
	}

	@Override
	public void categoryPage(String text, String title, int wikiID) {
		// not used by this extractor
	}

	@Override
	public void templatePage(String text, String title, int wikiID) {
		// not used by this extractor
	}

	@Override
	public void redirectPage(String text, String title, int wikiID) {
		// not used by this extractor
	}

	@Override
	public void portalPage(String text, String title, int wikiID) {
		// not used by this extractor
	}

	@Override
	public void projectPage(String text, String title, int wikiID) {
		// not used by this extractor
	}

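	/**
	 * Scans a content page for embedded file links. For each match the spaces
	 * in the file name are replaced with underscores, redirected file titles
	 * are resolved through {@code redirectPageMap}, and one tab-separated
	 * record is appended to the output: the page title, then the file location
	 * built from the source label (the local language code if the file is in
	 * {@code fileNameSet}, {@link #COMMONS_LABEL} otherwise), the hash-based
	 * directory returned by {@link #getDir(String)} and the file name.
	 */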
	@Override
	public void contentPage(String text, String title, int wikiID) {
		Matcher m = filePattern.matcher(text);
		StringBuilder buff = new StringBuilder();
		String fileName;
		String redirectPage, secondRedirectPage;
		int startName, endName;
		int startExtension, endExtension;
		while (m.find()) {
			startName = m.start(1);
			endName = m.end(1);
			startExtension = m.start(2);
			endExtension = m.end(2);

			//logger.debug(title + "\t" + text.substring(m.start(0), m.end(0)));
			//logger.debug(title + "\t" + text.substring(startName, endName) + "\t" + text.substring(startExtension, endExtension));
			//args = pipePattern.split(text.substring(s, e));
			//logger.debug(title + "\t" + args.length);
			//todo:only trim on the left
			fileName = text.substring(startName, endName).replace(CharacterTable.SPACE, CharacterTable.LOW_LINE) + StringTable.FULL_STOP + text.substring(startExtension, endExtension);

			redirectPage = redirectPageMap.get(filePrefix + fileName);
			//logger.debug(fileName + "\t" + redirectPage + "\t" + filePrefix + fileName);
			//todo: check multiple redirects
			if (redirectPage != null) {
				//logger.warn(fileName + " -> " + redirectPage);
				fileName = redirectPage.substring(filePrefix.length());
				//logger.warn(fileName + " == " + redirectPage);
			}

			//logger.debug(title + "\t" + fileName);
			buff.append(title);
			buff.append(CharacterTable.HORIZONTAL_TABULATION);
			if (fileNameSet.contains(fileName)) {
				//the file is in wikipedia
				//logger.debug(getLocale().getLanguage() + "\t" + fileName);
				buff.append(getLocale().getLanguage());
			}
			else {
				//the file is in commons
				//logger.debug("COMMONS_LABEL\t" + fileName);
				buff.append(COMMONS_LABEL);
			}
			buff.append(getDir(fileName));
			buff.append(fileName);
			//buff.append(CharacterTable.VERTICAL_LINE);
			/*if (args.length > 1)
			{
				//logger.debug(title + "\t" + args[args.length-1]);
				buff.append(args[args.length-1]);
			} */
			buff.append(CharacterTable.LINE_FEED);
		}

		synchronized (this) {
			fileSourceWriter.print(buff);
		}
	}

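	/**
	 * Builds the two-level directory prefix for a file name from the first hex
	 * digits of the MD5 hash of the name (e.g. "/a/ab/" for a hash starting
	 * with "ab"), mirroring the hashed upload directory layout used by
	 * MediaWiki.
	 */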
	private String getDir(String name) {
		byte[] bytesOfMessage = new byte[0];
		try {
			bytesOfMessage = name.getBytes("UTF-8");
		} catch (UnsupportedEncodingException e) {
			logger.error(e);
		}

		MessageDigest md = null;
		try {
			md = MessageDigest.getInstance("MD5");
		} catch (NoSuchAlgorithmException e) {
			// MD5 is required to be supported by every Java platform, so this should not happen
			logger.error(e);
		}
		byte[] digest = md.digest(bytesOfMessage);
		BigInteger bigInt = new BigInteger(1, digest);
		String hash = bigInt.toString(16);

		while (hash.length() < 32) {
			hash = "0" + hash;
		}

		return File.separator + hash.substring(0, 1) + File.separator + hash.substring(0, 2) + File.separator;
	}

	@Override
	public void endProcess() {
		super.endProcess();
		fileSourceWriter.close();
	}

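	/**
	 * Reads the file-page titles (one per line) from the given file into a
	 * set, stripping the leading "File:" prefix when present. Returns an empty
	 * set if the file does not exist.
	 */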
	private Set<String> read(File dis) throws IOException {
		Set<String> set = new HashSet<String>();
		if (!dis.exists()) {
			return set;
		}

		LineNumberReader reader = new LineNumberReader(new InputStreamReader(new FileInputStream(dis), "UTF-8"));

		String line = null;
		int j = 1;

		// read pages
		while ((line = reader.readLine()) != null) {
			if ((j % 100000) == 0) {
				System.out.print(".");
			}


			if (line.startsWith("File:")) {
				 set.add(line.substring(5, line.length()));
			} else {
				set.add(line);
			}


			j++;
		} // end while
		reader.close();

		if (j > 100000) {
			System.out.print("\n");
		}

		return set;
	} // end read

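	/**
	 * Command-line entry point: parses the options, builds the extractor and
	 * runs it on the given XML dump. An illustrative invocation (file names
	 * are examples only):
	 *
	 * <pre>
	 * java -cp dist/thewikimachine.jar eu.fbk.twm.wiki.xmldump.WikipediaFileSourceExtractor \
	 *     -d dump.xml -o output-dir -t 4
	 * </pre>
	 */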
	public static void main(String args[]) throws IOException {
		String logConfig = System.getProperty("log-config");
		if (logConfig == null) {
			logConfig = "configuration/log-config.txt";
		}

		PropertyConfigurator.configure(logConfig);

		Options options = new Options();
		try {
			Option wikipediaDumpOpt = OptionBuilder.withArgName("commonswiki-dump").hasArg().withDescription("commonswiki xml dump file").isRequired().withLongOpt("commonswiki-dump").create("d");
			Option outputDirOpt = OptionBuilder.withArgName("dir").hasArg().withDescription("output directory in which to store output files").isRequired().withLongOpt("output-dir").create("o");
			Option numThreadOpt = OptionBuilder.withArgName("int").hasArg().withDescription("number of threads (default " + Defaults.DEFAULT_THREADS_NUMBER + ")").withLongOpt("num-threads").create("t");
			Option numPageOpt = OptionBuilder.withArgName("int").hasArg().withDescription("number of pages to process (default all)").withLongOpt("num-pages").create("p");
			Option notificationPointOpt = OptionBuilder.withArgName("int").hasArg().withDescription("receive notification every n pages (default " + Defaults.DEFAULT_NOTIFICATION_POINT + ")").withLongOpt("notification-point").create("n");

			options.addOption("h", "help", false, "print this message");
			options.addOption("v", "version", false, "output version information and exit");


			options.addOption(wikipediaDumpOpt);
			options.addOption(outputDirOpt);
			options.addOption(numThreadOpt);
			options.addOption(numPageOpt);
			options.addOption(notificationPointOpt);

			CommandLineParser parser = new PosixParser();
			CommandLine line = parser.parse(options, args);


			int numThreads = Defaults.DEFAULT_THREADS_NUMBER;
			if (line.hasOption("num-threads")) {
				numThreads = Integer.parseInt(line.getOptionValue("num-threads"));
			}

			int numPages = Defaults.DEFAULT_NUM_PAGES;
			if (line.hasOption("num-pages")) {
				numPages = Integer.parseInt(line.getOptionValue("num-pages"));
			}

			int notificationPoint = Defaults.DEFAULT_NOTIFICATION_POINT;
			if (line.hasOption("notification-point")) {
				notificationPoint = Integer.parseInt(line.getOptionValue("notification-point"));
			}

			ExtractorParameters extractorParameters = new ExtractorParameters(line.getOptionValue("commonswiki-dump"), line.getOptionValue("output-dir"));
			logger.debug(extractorParameters);

			logger.debug("extracting files from page (" + extractorParameters.getWikipediaFileSourceName() + ")...");
			WikipediaFileSourceExtractor wikipediaFileSourceExtractor = new WikipediaFileSourceExtractor(numThreads, numPages, extractorParameters.getLocale());
			wikipediaFileSourceExtractor.setNotificationPoint(notificationPoint);
			wikipediaFileSourceExtractor.start(extractorParameters);

			logger.info("extraction ended " + new Date());

		} catch (ParseException e) {
			// oops, something went wrong
			System.out.println("Parsing failed: " + e.getMessage() + "\n");
			HelpFormatter formatter = new HelpFormatter();
			formatter.printHelp(400, "java -cp dist/thewikimachine.jar eu.fbk.twm.wiki.xmldump.WikipediaFileSourceExtractor", "\n", options, "\n", true);
		}
	}

}