/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.tools;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.URL;
import java.net.URLConnection;
import java.security.PrivilegedExceptionAction;
import java.util.Collection;
import java.util.Date;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.HftpFileSystem;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
import org.apache.hadoop.hdfs.server.namenode.CancelDelegationTokenServlet;
import org.apache.hadoop.hdfs.server.namenode.GetDelegationTokenServlet;
import org.apache.hadoop.hdfs.server.namenode.RenewDelegationTokenServlet;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.GenericOptionsParser;

/**
* Fetch a DelegationToken from the current Namenode and store it in the
* specified file.
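* <p>
* Typical invocation (host, port and output file name are illustrative only):
* <pre>
* hdfs fetchdt --webservice http://namenode.example.com:50070 mytoken
* </pre>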
*/
@InterfaceAudience.Private
public class DelegationTokenFetcher {
  private static final Log LOG =
      LogFactory.getLog(DelegationTokenFetcher.class);
  private static final String WEBSERVICE = "webservice";
  private static final String RENEWER = "renewer";
  private static final String CANCEL = "cancel";
  private static final String RENEW = "renew";
  private static final String PRINT = "print";
  private static final String HELP = "help";
  private static final String HELP_SHORT = "h";

  private static void printUsage(PrintStream err) {
    err.println("fetchdt retrieves delegation tokens from the NameNode");
    err.println();
    err.println("fetchdt <opts> <token file>");
    err.println("Options:");
    err.println("  --webservice <url>  Url to contact NN on");
    err.println("  --renewer <name>    Name of the delegation token renewer");
    err.println("  --cancel            Cancel the delegation token");
    err.println("  --renew             Renew the delegation token. Delegation "
        + "token must have been fetched using the --renewer option.");
    err.println("  --print             Print the delegation token");
    err.println();
    GenericOptionsParser.printGenericCommandUsage(err);
    System.exit(1);
  }

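  /**
   * Read all delegation tokens stored in the given token file.
   */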
  private static Collection<Token<?>> readTokens(Path file, Configuration conf)
      throws IOException {
    Credentials creds = Credentials.readTokenStorageFile(file, conf);
    return creds.getAllTokens();
  }

  /**
   * Command-line interface
   */
  public static void main(final String[] args) throws Exception {
    final Configuration conf = new HdfsConfiguration();
    Options fetcherOptions = new Options();
    fetcherOptions.addOption(WEBSERVICE, true,
        "HTTP url to reach the NameNode at");
    fetcherOptions.addOption(RENEWER, true,
        "Name of the delegation token renewer");
    fetcherOptions.addOption(CANCEL, false, "cancel the token");
    fetcherOptions.addOption(RENEW, false, "renew the token");
    fetcherOptions.addOption(PRINT, false, "print the token");
    fetcherOptions.addOption(HELP_SHORT, HELP, false, "print out help information");
    GenericOptionsParser parser = new GenericOptionsParser(conf,
        fetcherOptions, args);
    CommandLine cmd = parser.getCommandLine();

    // get options
    final String webUrl = cmd.hasOption(WEBSERVICE) ? cmd
        .getOptionValue(WEBSERVICE) : null;
    final String renewer = cmd.hasOption(RENEWER) ?
        cmd.getOptionValue(RENEWER) : null;
    final boolean cancel = cmd.hasOption(CANCEL);
    final boolean renew = cmd.hasOption(RENEW);
    final boolean print = cmd.hasOption(PRINT);
    final boolean help = cmd.hasOption(HELP);
    String[] remaining = parser.getRemainingArgs();

    // check option validity
    if (help) {
      printUsage(System.out);
      System.exit(0);
    }
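
    // Note: printUsage() ends by calling System.exit(1), so each error
    // branch below terminates the process after printing its message.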
    if (cancel && renew || cancel && print || renew && print || cancel && renew
        && print) {
      System.err.println("ERROR: Only specify cancel, renew or print.");
      printUsage(System.err);
    }
    if (remaining.length != 1 || remaining[0].charAt(0) == '-') {
      System.err.println("ERROR: Must specify exactly one token file");
      printUsage(System.err);
    }

    // default to using the local file system
    FileSystem local = FileSystem.getLocal(conf);
    final Path tokenFile = new Path(local.getWorkingDirectory(), remaining[0]);

    // Login the current user
    UserGroupInformation.getCurrentUser().doAs(
        new PrivilegedExceptionAction