
org.seqdoop.hadoop_bam.cli.CLIMergingAnySAMOutputFormat
A Java library for the manipulation of files in common bioinformatics formats using the Hadoop MapReduce framework.
// Copyright (c) 2013 Aalto University
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
// File created: 2013-06-25 16:24:52
package org.seqdoop.hadoop_bam.cli;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import org.seqdoop.hadoop_bam.KeyIgnoringAnySAMOutputFormat;
import org.seqdoop.hadoop_bam.SAMRecordWritable;

import hbparquet.hadoop.util.ContextUtil;
// Like a KeyIgnoringAnySAMOutputFormat, but sets the SAMFileHeader to
// Utils.getSAMHeaderMerger().getMergedHeader() and allows the output directory
// (the "work directory") to exist.
public class CLIMergingAnySAMOutputFormat<K>
    extends FileOutputFormat<K, SAMRecordWritable>
{
    private KeyIgnoringAnySAMOutputFormat<K> baseOF;

    // Construct the wrapped output format lazily: the Configuration is only
    // available once Hadoop hands us a task or job context.
    private void initBaseOF(Configuration conf) {
        if (baseOF == null)
            baseOF = new KeyIgnoringAnySAMOutputFormat<K>(conf);
    }
    @Override public RecordWriter<K, SAMRecordWritable> getRecordWriter(
            TaskAttemptContext context)
        throws IOException
    {
        initBaseOF(ContextUtil.getConfiguration(context));

        // If no header has been set yet, fall back to the merged header built
        // from the inputs recorded in the job's Configuration.
        if (baseOF.getSAMHeader() == null)
            baseOF.setSAMHeader(Utils.getSAMHeaderMerger(
                ContextUtil.getConfiguration(context)).getMergedHeader());

        return baseOF.getRecordWriter(context, getDefaultWorkFile(context, ""));
    }
    @Override public Path getDefaultWorkFile(TaskAttemptContext ctx, String ext)
        throws IOException
    {
        initBaseOF(ContextUtil.getConfiguration(ctx));

        // Name the part files so that the CLI's merging step can find and
        // concatenate them after the job completes.
        return Utils.getMergeableWorkFile(
            baseOF.getDefaultWorkFile(ctx, ext).getParent(), "", "", ctx, ext);
    }
    // Allow the output directory to exist: unlike FileOutputFormat's default
    // implementation, do not throw FileAlreadyExistsException when it does.
    @Override public void checkOutputSpecs(JobContext job) {}
}
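
For context, here is a minimal sketch of a driver that wires this output format into a MapReduce job. The MergeDriver class and the argument paths are hypothetical, not part of Hadoop-BAM. Note also that Utils.getSAMHeaderMerger() reads header-merger settings that Hadoop-BAM's bundled CLI tools record in the Configuration before launching a job; a standalone job would need to replicate that setup for the merged-header fallback in getRecordWriter() to work.

// Hypothetical driver: an identity, map-only job that reads SAM/BAM records
// with AnySAMInputFormat and writes them through CLIMergingAnySAMOutputFormat.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.seqdoop.hadoop_bam.AnySAMInputFormat;
import org.seqdoop.hadoop_bam.SAMRecordWritable;
import org.seqdoop.hadoop_bam.cli.CLIMergingAnySAMOutputFormat;

public class MergeDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "sam-merge-example");
        job.setJarByClass(MergeDriver.class);

        // AnySAMInputFormat emits <LongWritable, SAMRecordWritable> pairs; the
        // key is ignored on output, so the default identity Mapper suffices.
        job.setInputFormatClass(AnySAMInputFormat.class);
        job.setOutputFormatClass(CLIMergingAnySAMOutputFormat.class);
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(SAMRecordWritable.class);
        job.setNumReduceTasks(0);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        // checkOutputSpecs() above is a no-op, so this directory may already
        // exist without the job being rejected.
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}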