org.apache.hadoop.examples.dancing.DistributedPentomino
Apache Hadoop MapReduce Examples
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.examples.dancing;
import java.io.*;
import java.util.List;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.*;
import com.google.common.base.Charsets;
/**
* Launch a distributed pentomino solver.
* It generates a complete list of prefixes of length N, with each unique prefix
* on a separate line. A prefix is a sequence of N integers that denote the
* index of the row that is chosen for each column in order. Note that the
* next column is heuristically chosen by the solver, so it depends on
* the previous choice. That file is given as the input to the
* map/reduce job. The output key/value are the move prefix/solution as Text/Text.
*/
public class DistributedPentomino extends Configured implements Tool {
private static final int PENT_DEPTH = 5;
private static final int PENT_WIDTH = 9;
private static final int PENT_HEIGHT = 10;
private static final int DEFAULT_MAPS = 2000;
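// Illustrative note (not part of the original source): with the default depth of 5,
// each line of the generated input file is a comma-separated prefix such as
// "0,3,1,2,4" (hypothetical values), meaning row 0 was chosen for the first column,
// row 3 for the second, and so on. Each map task processes a subset of these lines.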
/**
* Each map takes a line, which represents a prefix move, and finds all of
* the solutions that start with that prefix. The output is the prefix as
* the key and the solution as the value.
*/
public static class PentMap extends
Mapper<WritableComparable<?>, Text, Text, Text> {
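// Assumption (not stated in this excerpt): the driver feeds the prefix file to this
// mapper through a line-oriented text input format, so the WritableComparable key is
// just the byte offset of the line and is ignored by map().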
private int width;
private int height;
private int depth;
private Pentomino pent;
private Text prefixString;
private Context context;
/**
* For each solution, generate the prefix and a string representation
* of the solution. The solution starts with a newline, so that the output
* looks like:
* <prefix>,
* <solution>
*/
class SolutionCatcher
implements DancingLinks.SolutionAcceptor<Pentomino.ColumnName> {
public void solution(List<List<Pentomino.ColumnName>> answer) {
String board = Pentomino.stringifySolution(width, height, answer);
try {
context.write(prefixString, new Text("\n" + board));
context.getCounter(pent.getCategory(answer)).increment(1);
} catch (IOException e) {
System.err.println(StringUtils.stringifyException(e));
} catch (InterruptedException ie) {
System.err.println(StringUtils.stringifyException(ie));
}
}
}
/**
* Break the prefix string into moves (a sequence of integer row ids that
* will be selected for each column in order). Find all solutions with
* that prefix.
*/
public void map(WritableComparable<?> key, Text value, Context context)
throws IOException {
prefixString = value;
StringTokenizer itr = new StringTokenizer(prefixString.toString(), ",");
int[] prefix = new int[depth];
int idx = 0;
while (itr.hasMoreTokens()) {
String num = itr.nextToken();
prefix[idx++] = Integer.parseInt(num);
}
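// Search every board position reachable from this prefix; each complete solution is
// reported through the SolutionCatcher printer registered in setup().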
pent.solve(prefix);
}
@Override
public void setup(Context context) {
this.context = context;
Configuration conf = context.getConfiguration();
depth = conf.getInt(Pentomino.DEPTH, PENT_DEPTH);
width = conf.getInt(Pentomino.WIDTH, PENT_WIDTH);
height = conf.getInt(Pentomino.HEIGHT, PENT_HEIGHT);
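// The concrete puzzle implementation is pluggable via the Pentomino.CLASS key;
// OneSidedPentomino is instantiated when nothing is configured.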
pent = (Pentomino)
ReflectionUtils.newInstance(conf.getClass(Pentomino.CLASS,
OneSidedPentomino.class),
conf);
pent.initialize(width, height);
pent.setPrinter(new SolutionCatcher());
}
}
/**
* Create the input file with all of the possible combinations of the
* given depth.
* @param fs the filesystem to write into
* @param dir the directory to write the input file into
* @param pent the puzzle
* @param depth the depth to explore when generating prefixes
* @return the length in bytes of the generated input file
*/
private static long createInputDirectory(FileSystem fs,
Path dir,
Pentomino pent,
int depth
) throws IOException {
fs.mkdirs(dir);
List<int[]> splits = pent.getSplits(depth);
Path input = new Path(dir, "part1");
PrintWriter file =
new PrintWriter(new OutputStreamWriter(new BufferedOutputStream
(fs.create(input), 64*1024), Charsets.UTF_8));
for(int[] prefix: splits) {
for(int i=0; i < prefix.length; ++i) {
if (i != 0) {
file.print(',');
}
file.print(prefix[i]);
}
file.print('\n');
}
file.close();
return fs.getFileStatus(input).getLen();
}
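// Illustrative sketch (hypothetical values): for depth 2 the generated "part1" file
// would hold one prefix per line, e.g.
//   0,1
//   0,2
//   3,0
// The returned file length lets the driver derive split sizes for the desired number
// of maps (that logic is outside this excerpt).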
/**
* Launch the solver on a 9x10 board with the one-sided pentominoes.
* This takes about 2.5 hours on 20 nodes with 2 cpus/node.
* Splits the job into 2000 maps and 1 reduce.
*/
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(),
new DistributedPentomino(), args);
System.exit(res);
}
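// Illustrative invocation (jar name and output path are examples, not from this file):
//   bin/hadoop jar hadoop-mapreduce-examples-*.jar pentomino /user/me/pent_out \
//       -depth 5 -width 9 -height 10
// The optional flags match the usage message printed by run(); the PENT_* constants
// above supply the defaults.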
public int run(String[] args) throws Exception {
Configuration conf = getConf();
if (args.length == 0) {
System.out.println("Usage: pentomino