
/*
* Copyright (2013) Fondazione Bruno Kessler (http://www.fbk.eu/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.fbk.twm.wiki.xmldump;
import de.tudarmstadt.ukp.wikipedia.parser.*;
import eu.fbk.twm.utils.CharacterTable;
import eu.fbk.twm.utils.Defaults;
import eu.fbk.twm.utils.ExtractorParameters;
import eu.fbk.twm.utils.WikipediaExtractor;
import eu.fbk.twm.wiki.xmldump.util.WikiMarkupParser;
import org.apache.commons.cli.*;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import java.io.*;
import java.util.Date;
import java.util.List;
import java.util.Locale;
//todo: check if it can be done in preprocessing
public class WikipediaFirstSentenceExtractor extends AbstractWikipediaExtractor implements WikipediaExtractor {
/**
 * Define a static logger variable so that it references the
 * Logger instance named WikipediaFirstSentenceExtractor.
 */
static Logger logger = Logger.getLogger(WikipediaFirstSentenceExtractor.class.getName());
private PrintWriter abstractWriter;
public WikipediaFirstSentenceExtractor(int numThreads, int numPages, Locale locale) {
super(numThreads, numPages, locale);
}
@Override
public void start(ExtractorParameters extractorParameters) {
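// Open the UTF-8 output file for the extracted first-sentence links; contentPage()
// appends tab-separated (page title, link target) lines to it for each content page.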
try {
abstractWriter = new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(extractorParameters.getWikipediaFirstSentenceFileName()), "UTF-8")));
} catch (IOException e) {
logger.error(e);
}
startProcess(extractorParameters.getWikipediaXmlFileName());
}
@Override
public void filePage(String text, String title, int wikiID) {
// File pages are not processed by this extractor.
}
@Override
public void categoryPage(String text, String title, int wikiID) {
}
@Override
public void contentPage(String text, String title, int wikiID) {
try {
String s = getLinksForFirstSentence(text, title);
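// Several worker threads (see the numThreads constructor argument) may call contentPage()
// concurrently, so writes to the shared PrintWriter are synchronized on this instance.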
synchronized (this) {
abstractWriter.print(s);
}
} catch (IOException e) {
logger.error(e);
}
}
/**
 * Extracts the links occurring in the first meaningful paragraph of the page and
 * returns them as tab-separated (page title, link target) pairs, one pair per line.
 */
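// Illustrative example (hypothetical page, tab shown as <TAB>): a page titled "Alan Turing"
// whose first paragraph links to "Computer science" and "Cryptanalysis" would yield:
//   Alan Turing<TAB>Computer science
//   Alan Turing<TAB>Cryptanalysis
// (link targets are passed through normalizePageName(), so their exact form may differ).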
public String getLinksForFirstSentence(String text, String title) throws IOException {
//todo: add Bio for Italian
StringBuilder sb = new StringBuilder();
WikiMarkupParser wikiMarkupParser = WikiMarkupParser.getInstance();
String[] prefixes = {imagePrefix, filePrefix};
ParsedPage parsedPage = wikiMarkupParser.parsePage(text, prefixes);
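// The markup is parsed with the locale-specific image/file prefixes (presumably inherited
// from AbstractWikipediaExtractor) so that media links are recognized as such.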
try {
List<Section> sectionList = parsedPage.getSections();
if (sectionList.size() > 0) {
Section section = sectionList.get(0);
List<Paragraph> paragraphs = section.getParagraphs();
Paragraph firstPar = new Paragraph();
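// Skip empty paragraphs, paragraphs that begin with italic text (typically hatnotes or
// disambiguation pointers) and paragraphs starting with ":" (indented notes); keep the
// first remaining paragraph.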
parLoop:
for (Paragraph p : paragraphs) {
if (p.getText().trim().length() == 0) {
continue;
}
List<Span> spans = p.getFormatSpans(Content.FormatType.ITALIC);
for (Span s : spans) {
if (s.getStart() == 0) {
continue parLoop;
}
}
if (p.getText().trim().startsWith(":")) {
continue;
}
firstPar = p;
break;
}
List<Link> links = firstPar.getLinks();
for (Link l : links) {
String target = l.getTarget();
if (target.startsWith("#")) {
continue;
}
String[] parts = target.split("#");
if (parts.length > 1) {
target = parts[0];
}
target = normalizePageName(target);
sb.append(title);
sb.append(CharacterTable.HORIZONTAL_TABULATION);
sb.append(target);
sb.append(CharacterTable.LINE_FEED);
}
// sb.append(title);
// sb.append(CharacterTable.HORIZONTAL_TABULATION);
// sb.append(section.getText().replace(CharacterTable.LINE_FEED, CharacterTable.SPACE).trim());
// sb.append(CharacterTable.LINE_FEED);
}
} catch (Exception e) {
logger.warn(e.getMessage());
}
return sb.toString();
}
@Override
public void disambiguationPage(String text, String title, int wikiID) {
// Disambiguation pages are not processed by this extractor.
}
@Override
public void templatePage(String text, String title, int wikiID) {
// Template pages are not processed by this extractor.
}
@Override
public void redirectPage(String text, String title, int wikiID) {
// Redirect pages are not processed by this extractor.
}
@Override
public void portalPage(String text, String title, int wikiID) {
// Portal pages are not processed by this extractor.
}
@Override
public void projectPage(String text, String title, int wikiID) {
// Project pages are not processed by this extractor.
}
@Override
public void endProcess() {
super.endProcess();
abstractWriter.flush();
abstractWriter.close();
}
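/*
 * Example invocation (file paths are illustrative, not taken from the project documentation):
 *
 *   java -cp dist/thewikimachine.jar eu.fbk.twm.wiki.xmldump.WikipediaFirstSentenceExtractor \
 *        -d enwiki-latest-pages-articles.xml -o /path/to/output -t 4
 *
 * -d (--wikipedia-dump) and -o (--output-dir) are required; -t, -p, -n and --base-dir are optional.
 */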
public static void main(String[] args) throws IOException {
String logConfig = System.getProperty("log-config");
if (logConfig == null) {
logConfig = "configuration/log-config.txt";
}
PropertyConfigurator.configure(logConfig);
Options options = new Options();
try {
Option wikipediaDumpOpt = OptionBuilder.withArgName("file").hasArg().withDescription("wikipedia xml dump file").isRequired().withLongOpt("wikipedia-dump").create("d");
Option outputDirOpt = OptionBuilder.withArgName("dir").hasArg().withDescription("output directory in which to store output files").isRequired().withLongOpt("output-dir").create("o");
Option numThreadOpt = OptionBuilder.withArgName("int").hasArg().withDescription("number of threads (default " + Defaults.DEFAULT_THREADS_NUMBER
+ ")").withLongOpt("num-threads").create("t");
Option numPageOpt = OptionBuilder.withArgName("int").hasArg().withDescription("number of pages to process (default all)").withLongOpt("num-pages").create("p");
Option notificationPointOpt = OptionBuilder.withArgName("int").hasArg().withDescription("receive notification every n pages (default " + Defaults.DEFAULT_NOTIFICATION_POINT
+ ")").withLongOpt("notification-point").create("n");
Option baseDirOpt = OptionBuilder.withDescription("if set, use the output folder as base dir").withLongOpt("base-dir").create();
options.addOption("h", "help", false, "print this message");
options.addOption("v", "version", false, "output version information and exit");
options.addOption(wikipediaDumpOpt);
options.addOption(outputDirOpt);
options.addOption(numThreadOpt);
options.addOption(numPageOpt);
options.addOption(notificationPointOpt);
options.addOption(baseDirOpt);
CommandLineParser parser = new PosixParser();
CommandLine line = parser.parse(options, args);
int numThreads = Defaults.DEFAULT_THREADS_NUMBER;
if (line.hasOption("num-threads")) {
numThreads = Integer.parseInt(line.getOptionValue("num-threads"));
}
int numPages = Defaults.DEFAULT_NUM_PAGES;
if (line.hasOption("num-pages")) {
numPages = Integer.parseInt(line.getOptionValue("num-pages"));
}
int notificationPoint = Defaults.DEFAULT_NOTIFICATION_POINT;
if (line.hasOption("notification-point")) {
notificationPoint = Integer.parseInt(line.getOptionValue("notification-point"));
}
ExtractorParameters extractorParameters;
if (line.hasOption("base-dir")) {
extractorParameters = new ExtractorParameters(line.getOptionValue("wikipedia-dump"), line.getOptionValue("output-dir"), true);
}
else {
extractorParameters = new ExtractorParameters(line.getOptionValue("wikipedia-dump"), line.getOptionValue("output-dir"));
}
logger.debug(extractorParameters);
logger.debug("extracting abstracts (" + extractorParameters.getWikipediaAbstractFileName() + ")...");
WikipediaFirstSentenceExtractor wikipediaAbstractExtractor = new WikipediaFirstSentenceExtractor(numThreads, numPages, extractorParameters.getLocale());
wikipediaAbstractExtractor.setNotificationPoint(notificationPoint);
wikipediaAbstractExtractor.start(extractorParameters);
logger.info("extraction ended " + new Date());
} catch (ParseException e) {
// oops, something went wrong
System.out.println("Parsing failed: " + e.getMessage() + "\n");
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp(400, "java -cp dist/thewikimachine.jar eu.fbk.twm.wiki.xmldump.WikipediaFirstSentenceExtractor", "\n", options, "\n", true);
}
}
}