eu.dicodeproject.analysis.hbase.HBaseLuceneTokenizerDriver Maven / Gradle / Ivy
The examples module provides glue code for extracting common phrases, keyword distributions, and more from tweets stored on HDFS/HBase. It builds on Mahout for more sophisticated analysis.
/**
* Copyright (C) 2010, 2011 Neofonie GmbH
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.dicodeproject.analysis.hbase;
import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.ToolRunner;
import org.apache.lucene.analysis.Analyzer;
import org.apache.mahout.common.AbstractJob;
import org.apache.mahout.common.commandline.DefaultOptionCreator;
import org.apache.mahout.vectorizer.DefaultAnalyzer;
import org.apache.mahout.vectorizer.DocumentProcessor;
/**
* Reads text from a configurable HBase table and column, tokenizes it with a
* Lucene analyzer, and writes the tokenized documents to HDFS for further
* processing by the Mahout collocation driver.
*/
public class HBaseLuceneTokenizerDriver extends AbstractJob {
private HBaseLuceneTokenizerDriver() {
// don't instantiate drivers
}
public static void main(String[] args) throws Exception {
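// ToolRunner parses generic Hadoop options (such as -D properties) before delegating to run().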
ToolRunner.run(new HBaseLuceneTokenizerDriver(), args);
}
@Override
public int run(String[] args) throws ClassNotFoundException, IllegalAccessException, InstantiationException, InterruptedException, IOException {
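// Standard Mahout job options: the output path and the number of reducers.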
addOutputOption();
addOption(DefaultOptionCreator.numReducersOption().create());
addOption("analyzerName", "a", "The class name of the analyzer to use for preprocessing", null);
addOption("table", "t", "The hbase table holding our data.", "twittertracker");
addOption("family", "f", "The column family holding our data.", "textFamily");
addOption("column", "c", "The column holding our data.", "text");
Map<String, String> argMap = parseArguments(args);
if (argMap == null) {
return -1;
}
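// Default to Mahout's DefaultAnalyzer unless a custom Lucene analyzer class was supplied.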
Class<? extends Analyzer> analyzerClass = DefaultAnalyzer.class;
if (argMap.get("--analyzerName") != null) {
String className = argMap.get("--analyzerName");
analyzerClass = Class.forName(className).asSubclass(Analyzer.class);
// Try instantiating the analyzer up front: there is no point in
// accepting it if it cannot be instantiated.
analyzerClass.newInstance();
}
String table = argMap.get("--table");
String family = argMap.get("--family");
String column = argMap.get("--column");
Path output = getOutputPath();
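// The Mahout collocation driver expects its input in this subfolder of the output path.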
Path tokenizedPath = new Path(output, DocumentProcessor.TOKENIZED_DOCUMENT_OUTPUT_FOLDER);
HBaseDocumentProcessor.tokenizeDocuments(table, family, column, analyzerClass, tokenizedPath);
return 0;
}
}
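For reference, here is a minimal sketch of how the driver could be invoked programmatically. The wrapper class name and the output path are hypothetical; the table, family, and column values simply repeat the defaults declared above, and --analyzerName may be added to swap in any Lucene Analyzer with a public no-argument constructor.
public class TokenizeTweetsExample {
public static void main(String[] args) throws Exception {
// All values below are illustrative; main() wires the driver through Hadoop's ToolRunner.
HBaseLuceneTokenizerDriver.main(new String[] {
"--output", "/dicode/tokenized", // HDFS directory for the tokenized documents
"--table", "twittertracker", // HBase table holding the tweets
"--family", "textFamily", // column family with the text
"--column", "text" // column qualifier with the tweet body
});
}
}
The tokenized sequence files then land beneath the configured output path, ready for the collocation step.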