eu.dicodeproject.analysis.generic.GenericTableDriver

The examples module provides glue code for extracting common phrases, keyword distributions, and more from tweets stored on HDFS/HBase. It builds on Mahout for more sophisticated analysis.
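The GenericTableDriver below is launched through Hadoop's ToolRunner and accepts --input (a directory of word/count sequence files, default "tmp"), --table (the target HBase table, default "dummy-results"), and --resultId (the row key prefix) options. An illustrative invocation, with a purely hypothetical jar name:

hadoop jar dicode-analysis-examples.jar eu.dicodeproject.analysis.generic.GenericTableDriver --input /user/me/counts --table tweet-results --resultId run1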

/**
 * Copyright (C) 2010, 2011 Neofonie GmbH
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package eu.dicodeproject.analysis.generic;

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.util.ToolRunner;
import org.apache.mahout.common.AbstractJob;
import org.apache.mahout.common.commandline.DefaultOptionCreator;



/**
 * Reads word/count pairs from sequence files on HDFS and writes them into a
 * configurable HBase results table, prefixing each row key with a
 * configurable result id.
 */
public final class GenericTableDriver extends AbstractJob {

  private GenericTableDriver() {
    // don't instantiate drivers
  }

  public static void main(String[] args) throws Exception {
    ToolRunner.run(new GenericTableDriver(), args);
  }

  @Override
  public int run(String[] args) throws ClassNotFoundException, InterruptedException, IOException {
    addOption(DefaultOptionCreator.numReducersOption().create());
    addOption("input", "i", "The directory holding word/count pairs", "tmp");
    addOption("table", "t", "The hbase table storing the results.", "dummy-results");    
    addOption("resultId", "r", "The result which will be the row prefix.", "");

    Map<String, String> argMap = parseArguments(args);
    if (argMap == null) {
      return -1;
    }
    String input = argMap.get("--input");
    String table = argMap.get("--table");

    Configuration conf = HBaseConfiguration.create();
    conf.set("resultId", argMap.get("--resultId")); // for the reducer
    Job job = new Job(conf);

    job.setJarByClass(GenericTableDriver.class);
    job.setInputFormatClass(SequenceFileInputFormat.class);

    FileInputFormat.addInputPath(job, new Path(input));
    job.setJobName("HBaseDocumentProcessor::GenericTableWriter");

    // Identity mapper: the input sequence files already hold IntWritable/Text pairs,
    // so records are passed through to the reducer unchanged.
    job.setMapperClass(Mapper.class);
    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(Text.class);

    // Route the job's output to the HBase results table via GenericTableReducer.
    TableMapReduceUtil.initTableReducerJob(table, GenericTableReducer.class, job);

    // Note: the reducer count is fixed at one, even though --numReducers is accepted above.
    job.setNumReduceTasks(1);
    return job.waitForCompletion(true) ? 0 : -1;
  }
}
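
The reducer wired in above, GenericTableReducer, is not part of this source file. A minimal sketch of what such a TableReducer could look like, assuming (purely for illustration) that each incoming IntWritable/Text pair becomes one HBase row keyed by the configured resultId prefix plus the item text, with the count stored in a hypothetical column family "d" under qualifier "count":

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

/**
 * Illustrative sketch only; the reducer actually shipped with the module may
 * use a different row layout and column family.
 */
public class GenericTableReducer extends TableReducer<IntWritable, Text, ImmutableBytesWritable> {

  /** Row key prefix, passed in by the driver via conf.set("resultId", ...). */
  private String resultId;

  @Override
  protected void setup(Context context) {
    resultId = context.getConfiguration().get("resultId", "");
  }

  @Override
  protected void reduce(IntWritable count, Iterable<Text> items, Context context)
      throws IOException, InterruptedException {
    for (Text item : items) {
      // Hypothetical row layout: "<resultId>:<item>", count stored in family "d", qualifier "count".
      byte[] row = Bytes.toBytes(resultId + ":" + item.toString());
      Put put = new Put(row);
      put.add(Bytes.toBytes("d"), Bytes.toBytes("count"), Bytes.toBytes(count.get()));
      context.write(new ImmutableBytesWritable(row), put);
    }
  }
}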



