com.mongodb.hadoop.hive.output.HiveMongoOutputFormat Maven / Gradle / Ivy

The MongoDB Connector for Hadoop is a Hadoop plugin that lets jobs use MongoDB as an input source and/or an output destination.
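
As a brief, hedged sketch of the output side: the connector resolves the target collection from the "mongo.output.uri" job property. The class name and connection string below are illustrative placeholders, not values shipped by the connector.

import org.apache.hadoop.mapred.JobConf;

public class MongoOutputConfigSketch {
    public static void main(final String[] args) {
        JobConf conf = new JobConf();
        // Placeholder connection string; MongoConfigUtil.getOutputCollection(conf)
        // resolves the target MongoDB collection from this property.
        conf.set("mongo.output.uri", "mongodb://localhost:27017/test.hive_out");
    }
}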

/*
 * Copyright 2010-2013 10gen Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.mongodb.hadoop.hive.output;

import com.mongodb.hadoop.io.BSONWritable;
import com.mongodb.hadoop.output.MongoOutputCommitter;
import com.mongodb.hadoop.output.MongoRecordWriter;
import com.mongodb.hadoop.util.CompatUtils;
import com.mongodb.hadoop.util.MongoConfigUtil;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TaskAttemptContext;
import org.apache.hadoop.util.Progressable;

import java.io.IOException;
import java.util.Properties;

/*
 * HiveMongoOutputFormat specifies how Hive writes rows of Hive tables
 * into MongoDB.
 */
public class HiveMongoOutputFormat implements HiveOutputFormat<Object, BSONWritable> {

    @Override
    public RecordWriter getHiveRecordWriter(final JobConf conf,
                                            final Path finalOutPath,
final Class<? extends Writable> valueClass,
                                            final boolean isCompressed,
                                            final Properties tableProperties,
                                            final Progressable progress) throws IOException {
        return new HiveMongoRecordWriter(conf);
    }


    @Override
    public void checkOutputSpecs(final FileSystem arg0, final JobConf arg1) throws IOException {
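        // No-op: output goes directly to a MongoDB collection, so there is
        // no filesystem output directory to validate before the job runs.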
    }


    @Override
    public org.apache.hadoop.mapred.RecordWriter<Object, BSONWritable>
    getRecordWriter(final FileSystem arg0, final JobConf arg1, final String arg2, final Progressable arg3) throws IOException {
        throw new IOException("Hive should call 'getHiveRecordWriter' instead of 'getRecordWriter'");
    }


    /*
     * HiveMongoRecordWriter adapts MongoRecordWriter to Hive's RecordWriter
     * interface, serializing Hive rows as BSON objects written to MongoDB.
     */
    private class HiveMongoRecordWriter
        extends MongoRecordWriter<Object, BSONWritable>
        implements RecordWriter {

        private final CompatUtils.TaskAttemptContext context;
        private final MongoOutputCommitter committer;

        public HiveMongoRecordWriter(final JobConf conf) {
            super(
              MongoConfigUtil.getOutputCollection(conf),
              CompatUtils.getTaskAttemptContext(
                conf, conf.get("mapred.task.id")));
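            // The same task attempt context is constructed again for this
            // class's own field: super() must be the first statement, so the
            // instance passed above could not be kept in a local variable.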
            context =
              CompatUtils.getTaskAttemptContext(
                conf, conf.get("mapred.task.id"));
            committer = new MongoOutputCommitter();
        }

        @Override
        public void close(final boolean abort) throws IOException {
            // Disambiguate call to super.close().
            super.close((TaskAttemptContext) null);
            if (abort) {
                committer.abortTask(context);
            } else if (committer.needsTaskCommit(context)) {
                committer.commitTask(context);
            }
        }

        @Override
        public void write(final Writable w) throws IOException {
            super.write(null, (BSONWritable) w);
        }
    }
}
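
For illustration only, a hedged sketch of the call sequence Hive performs against this class: obtain a writer, write BSONWritable rows, then close with the abort flag. A real deployment drives this through the connector's Hive storage handler rather than directly; the URI, task attempt id, and class name below are placeholder assumptions, and a running mongod is assumed.

import com.mongodb.hadoop.hive.output.HiveMongoOutputFormat;
import com.mongodb.hadoop.io.BSONWritable;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.mapred.JobConf;
import org.bson.BasicBSONObject;

import java.io.IOException;
import java.util.Properties;

public class HiveMongoWriteSketch {
    public static void main(final String[] args) throws IOException {
        JobConf conf = new JobConf();
        // Placeholder values; Hive normally sets these itself.
        conf.set("mongo.output.uri", "mongodb://localhost:27017/test.hive_out");
        conf.set("mapred.task.id", "attempt_local_0001_r_000000_0");

        // The Path and Progressable arguments are unused by this implementation.
        RecordWriter writer = new HiveMongoOutputFormat().getHiveRecordWriter(
            conf, null, BSONWritable.class, false, new Properties(), null);

        // Each row arrives as a BSONWritable and is inserted into MongoDB.
        writer.write(new BSONWritable(new BasicBSONObject("name", "example")));

        // close(false) commits the task's writes; close(true) aborts them.
        writer.close(false);
    }
}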