/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hive.hcatalog.cli;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.Parser;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.processors.DfsProcessor;
import org.apache.hadoop.hive.ql.processors.SetProcessor;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
import org.apache.hive.hcatalog.common.HCatConstants;
import org.apache.hive.hcatalog.common.HCatUtil;
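/**
 * Command-line entry point for HCatalog. Parses the hcat options
 * (-e, -f, -g, -p, -D, -h), configures the Hive session, and runs
 * HCat commands either from the command line or from a file.
 */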
public class HCatCli {

  private static Logger LOG = null;

  @SuppressWarnings("static-access")
  public static void main(String[] args) {

    try {
      LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
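      // Intentionally ignored: the CLI proceeds even if log4j cannot be initialized.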
    }
    LOG = LoggerFactory.getLogger(HCatCli.class);
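    // Set up the CLI session state with UTF-8 output and error streams.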
    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
      ss.out = new PrintStream(System.out, true, "UTF-8");
      ss.err = new PrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      System.exit(1);
    }

    HiveConf conf = ss.getConf();
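    // Install the HCatalog semantic analyzer hook for processing HCat DDL.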
    HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());

    String engine = HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE);
    final String MR_ENGINE = "mr";
    if (!MR_ENGINE.equalsIgnoreCase(engine)) {
      HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_ENGINE, MR_ENGINE);
      LOG.info("Forcing " + ConfVars.HIVE_EXECUTION_ENGINE + " to " + MR_ENGINE);
    }
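    // Build the supported command-line options.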
    Options options = new Options();

    // -e 'quoted-query-string'
    options.addOption(OptionBuilder
      .hasArg()
      .withArgName("exec")
      .withDescription("hcat command given from command line")
      .create('e'));

    // -f
    options.addOption(OptionBuilder
      .hasArg()
      .withArgName("file")
      .withDescription("hcat commands in file")
      .create('f'));

    // -g
    options.addOption(OptionBuilder
      .hasArg()
      .withArgName("group")
      .withDescription("group for the db/table specified in CREATE statement")
      .create('g'));

    // -p
    options.addOption(OptionBuilder
      .hasArg()
      .withArgName("perms")
      .withDescription("permissions for the db/table specified in CREATE statement")
      .create('p'));

    // -D
    options.addOption(OptionBuilder
      .hasArgs(2)
      .withArgName("property=value")
      .withValueSeparator()
      .withDescription("use hadoop value for given property")
      .create('D'));

    // [-h|--help]
    options.addOption(new Option("h", "help", false, "Print help information"));
    Parser parser = new GnuParser();
    CommandLine cmdLine = null;

    try {
      cmdLine = parser.parse(options, args);
    } catch (ParseException e) {
      printUsage(options, System.err);
      // Note, we print to System.err instead of ss.err, because if we can't parse our
      // commandline, we haven't even begun, and therefore cannot be expected to have
      // reasonably constructed or started the SessionState.
      System.exit(1);
    }

    // -D : process these first, so that we can instantiate SessionState appropriately.
    setConfProperties(conf, cmdLine.getOptionProperties("D"));

    // -h
    if (cmdLine.hasOption('h')) {
      printUsage(options, ss.out);
      sysExit(ss, 0);
    }

    // -e
    String execString = (String) cmdLine.getOptionValue('e');

    // -f
    String fileName = (String) cmdLine.getOptionValue('f');
    if (execString != null && fileName != null) {
      ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
      printUsage(options, ss.err);
      sysExit(ss, 1);
    }

    // -p
    String perms = (String) cmdLine.getOptionValue('p');
    if (perms != null) {
      validatePermissions(ss, conf, perms);
    }

    // -g
    String grp = (String) cmdLine.getOptionValue('g');
    if (grp != null) {
      conf.set(HCatConstants.HCAT_GROUP, grp);
    }

    // Now that the properties are in, we can instantiate SessionState.
    SessionState.start(ss);

    // all done parsing, let's run stuff!
    if (execString != null) {
      sysExit(ss, processLine(execString));
    }

    try {
      if (fileName != null) {
        sysExit(ss, processFile(fileName));
      }
    } catch (FileNotFoundException e) {
      ss.err.println("Input file not found. (" + e.getMessage() + ")");
      sysExit(ss, 1);
    } catch (IOException e) {
      ss.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
      sysExit(ss, 1);
    }
    // Neither -e nor -f was given: print usage and exit with an error.
    printUsage(options, ss.err);
    sysExit(ss, 1);
  }

  /**
   * Wrapper for System.exit that makes sure we close our SessionState
   * before we exit. This ignores any error generated by attempting to
   * close the session state, merely printing out the error. The return
   * code is not changed in such an occurrence because we want to retain
   * the success code of whatever command we already ran.
   */
  private static void sysExit(SessionState ss, int retCode) {
    try {
      ss.close();
    } catch (IOException e) {
      // If we got an error attempting to ss.close, then it's not likely that
      // ss.err is valid. So we're back to System.err. Also, we don't change
      // the return code, we simply log a warning, and return whatever return
      // code we expected to do already.
      System.err.println(e.getMessage());
      e.printStackTrace(System.err);
    }
    System.exit(retCode);
  }

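  /**
   * Applies the -D property=value pairs collected from the command line
   * to the given HiveConf.
   */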
  private static void setConfProperties(HiveConf conf, Properties props) {
for (java.util.Map.Entry