/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.mahout.common;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicInteger;

import com.google.common.base.Preconditions;
import org.apache.commons.cli2.CommandLine;
import org.apache.commons.cli2.Group;
import org.apache.commons.cli2.Option;
import org.apache.commons.cli2.OptionException;
import org.apache.commons.cli2.builder.ArgumentBuilder;
import org.apache.commons.cli2.builder.DefaultOptionBuilder;
import org.apache.commons.cli2.builder.GroupBuilder;
import org.apache.commons.cli2.commandline.Parser;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.mahout.common.commandline.DefaultOptionCreator;
import org.apache.mahout.common.lucene.AnalyzerUtils;
import org.apache.mahout.math.VectorWritable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * <p>Superclass of many Mahout Hadoop "jobs". A job drives configuration and launch of one or
 * more maps and reduces in order to accomplish some task.</p>
 *
 * <p>Command line arguments available to all subclasses are:</p>
 *
 * <ul>
 *   <li>--tempDir (path): Specifies a directory where the job may place temp files
 *     (default "temp")</li>
 *   <li>--help: Show help message</li>
 * </ul>
 *
 * <p>In addition, note some key command line parameters that are parsed by Hadoop, which jobs
 * may need to set:</p>
 *
 * <ul>
 *   <li>-Dmapred.job.name=(name): Sets the Hadoop task names. It will be suffixed by
 *     the mapper and reducer class names</li>
 *   <li>-Dmapred.output.compress={true,false}: Compress final output (default true)</li>
 *   <li>-Dmapred.input.dir=(path): input file, or directory containing input files (required)</li>
 *   <li>-Dmapred.output.dir=(path): path to write output files (required)</li>
 * </ul>
 *
 * <p>Note that because of how Hadoop parses arguments, all "-D" arguments must appear before all
 * other arguments.</p>
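 *
 * <p>For example, an invocation that follows this ordering might look like the sketch below;
 * the jar name, driver class, and paths are purely illustrative and not part of this class:</p>
 *
 * <pre>
 * hadoop jar my-mahout-job.jar org.example.ExampleDriver \
 *     -Dmapred.input.dir=/user/me/input \
 *     -Dmapred.output.dir=/user/me/output \
 *     -Dmapred.output.compress=false \
 *     --tempDir /user/me/temp
 * </pre>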
 */
public abstract class AbstractJob extends Configured implements Tool {

  private static final Logger log = LoggerFactory.getLogger(AbstractJob.class);

  /** option used to specify the input path */
  private Option inputOption;

  /** option used to specify the output path */
  private Option outputOption;

  /** input path, populated by {@link #parseArguments(String[])} */
  protected Path inputPath;
  protected File inputFile; // the input represented as a file

  /** output path, populated by {@link #parseArguments(String[])} */
  protected Path outputPath;
  protected File outputFile; // the output represented as a file

  /** temp path, populated by {@link #parseArguments(String[])} */
  protected Path tempPath;

  protected Map<String, List<String>> argMap;

  /** internal list of options that have been added */
  private final List<Option> options;
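
  // Illustrative sketch (an assumption, not part of the original listing, which is truncated
  // above): a typical subclass registers its options, lets AbstractJob parse the command line,
  // and then reads the fields populated by parseArguments(String[]). The helper methods
  // addInputOption() and addOutputOption() are assumed from the inputOption/outputOption fields
  // and the usual Mahout driver pattern; treat this as a sketch, not the class's actual contract.
  //
  //   public class ExampleDriver extends AbstractJob {
  //     @Override
  //     public int run(String[] args) throws Exception {
  //       addInputOption();
  //       addOutputOption();
  //       if (parseArguments(args) == null) {
  //         return -1;              // --help was requested or the arguments were invalid
  //       }
  //       Path input = inputPath;   // populated by parseArguments(String[])
  //       Path output = outputPath;
  //       // ... configure and submit one or more Hadoop jobs against input/output here ...
  //       return 0;
  //     }
  //   }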



