/**
 * Copyright (C) 2015 Greg Brandt ([email protected])
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.brandtg.switchboard;

import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.net.InetSocketAddress;
import java.util.concurrent.Executors;
import java.util.concurrent.ExecutorService;

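/**
 * An iterator over {@link FSEditLogOp} entries parsed from a stream of
 * concatenated HDFS edit log segments.
 *
 * <p>If parsing fails mid-stream (e.g. on a partial or corrupt op), {@link #next()}
 * skips the remaining buffered bytes and resets the parser before retrying, so the
 * iterator can resynchronize rather than fail.
 *
 * <p>Usage sketch (the file-based input here is illustrative):
 * <pre>{@code
 * try (InputStream in = new FileInputStream("edits")) {
 *   HdfsLogIterator itr = new HdfsLogIterator(in);
 *   while (itr.hasNext()) {
 *     System.out.println(itr.next());
 *   }
 * }
 * }</pre>
 */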
public class HdfsLogIterator extends LogIterator {
  private static final Logger LOG = LoggerFactory.getLogger(HdfsLogIterator.class);

  private final InputStream inputStream;

  private final HdfsLogParser hdfsParser;

  /**
   * Creates an iterator of {@link FSEditLogOp}s over a stream of concatenated HDFS edit logs.
   */
  public HdfsLogIterator(InputStream inputStream) throws IOException {
    this.inputStream = inputStream;
    this.hdfsParser = new HdfsLogParser(inputStream);
  }

  @Override
  public FSEditLogOp next() {
    while (true) {
      try {
        return hdfsParser.nextOp();
      } catch (IOException e) {
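        // A parse failure usually means a partial or corrupt op in the buffered
        // data: skip whatever bytes are currently available and reset the parser
        // so iteration can resynchronize on the next complete record.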
        try {
          int available = inputStream.available();
          long skipped = inputStream.skip(available);
          if (available != skipped) {
            throw new IOException("Could not skip rest of input stream");
          }

          hdfsParser.resetReader();
        } catch (IOException e2) {
          throw new IllegalStateException(e2);
        }
      }
    }
  }

  /**
   * Runs a process that listens for edit log events and prints each op's toString to STDOUT.
   */
  public static void main(String[] args) throws Exception {
    if (args.length != 2) {
      throw new Exception("usage: sourceHost:sourcePort serverPort");
    }
    String[] sourceHostPort = args[0].split(":");
    InetSocketAddress sourceAddress =
        new InetSocketAddress(sourceHostPort[0], Integer.parseInt(sourceHostPort[1]));
    InetSocketAddress serverAddress = new InetSocketAddress(Integer.parseInt(args[1]));
    PipedOutputStream outputStream = new PipedOutputStream();

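    // The receiver listens on serverAddress and writes incoming log data into
    // the pipe; the puller asks the switchboard at sourceAddress to stream edit
    // logs to this receiver (the "*" target and index 0 appear to request all
    // logs from the beginning).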
    final EventLoopGroup eventLoopGroup = new NioEventLoopGroup();
    final LogReceiver logReceiver = new LogReceiver(serverAddress, eventLoopGroup, outputStream);
    final LogPuller logPuller = new LogPuller(sourceAddress, serverAddress, "*", 0);
    final ExecutorService pullerExecutor = Executors.newSingleThreadExecutor();

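    // On JVM exit, stop pulling before tearing down the receiver and event loop.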
    Runtime.getRuntime().addShutdownHook(new Thread() {
      @Override
      public void run() {
        try {
          logPuller.shutdown();
          pullerExecutor.shutdown();
          logReceiver.shutdown();
        } catch (Exception e) {
          LOG.error("Exception while shutting down log receiver", e);
        }
        eventLoopGroup.shutdownGracefully();
      }
    });

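    // Register the puller as a listener on the receiver, start the server, then
    // run the puller on a background thread.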
    logReceiver.registerListener(logPuller);
    logReceiver.start();
    pullerExecutor.submit(logPuller);

    // Print edit log ops to console
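    // The 1 MB pipe buffer decouples the receiver's writer thread from this consumer.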
    PipedInputStream inputStream = new PipedInputStream(outputStream, 1024 * 1024);
    LogIterator itr = new HdfsLogIterator(inputStream);
    while (itr.hasNext()) {
      System.out.println(itr.next());
    }
  }
}