/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.jsp.JspWriter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.JobHistory.JobInfo;
import org.apache.hadoop.mapred.JobTracker.RetireJobInfo;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.util.ServletUtil;
import org.apache.hadoop.util.StringUtils;
public class JSPUtil {
// Config key gating destructive web-UI actions (kill job, change priority).
private static final String PRIVATE_ACTIONS_KEY = "webinterface.private.actions";
public static final Configuration conf = new Configuration();
//LRU based cache
// NOTE(review): declared as a raw Map/LinkedHashMap; the generic parameters
// (likely Map<String, JobInfo>) appear to have been stripped from this copy
// of the file -- confirm against upstream before relying on it. The LRU
// eviction itself is presumably handled where entries are inserted; the
// plain LinkedHashMap constructor alone does not evict.
private static final Map jobHistoryCache =
new LinkedHashMap();
// Upper bound on cached parsed job-history entries (configurable).
private static final int CACHE_SIZE =
conf.getInt("mapred.job.tracker.jobhistory.lru.cache.size", 5);
private static final Log LOG = LogFactory.getLog(JSPUtil.class);
/**
 * Applies the action buttons submitted from the job listing page: killing
 * the selected jobs and/or changing their priority. Both actions are
 * honoured only when private actions are enabled in the configuration.
 *
 * @param request HTTP request carrying the submitted form parameters.
 * @param response HTTP response object (unused here; kept for JSP callers).
 * @param tracker {@link JobTracker} instance that owns the jobs.
 * @throws IOException if the tracker fails to apply an action.
 */
public static void processButtons(HttpServletRequest request,
    HttpServletResponse response, JobTracker tracker) throws IOException {
  boolean privateActions = conf.getBoolean(PRIVATE_ACTIONS_KEY, false);

  if (privateActions && request.getParameter("killJobs") != null) {
    String[] selected = request.getParameterValues("jobCheckBox");
    if (selected != null) {
      for (String jobIdText : selected) {
        tracker.killJob(JobID.forName(jobIdText));
      }
    }
  }

  if (privateActions && request.getParameter("changeJobPriority") != null) {
    String[] selected = request.getParameterValues("jobCheckBox");
    if (selected != null) {
      JobPriority priority =
          JobPriority.valueOf(request.getParameter("setJobPriority"));
      for (String jobIdText : selected) {
        tracker.setJobPriority(JobID.forName(jobIdText), priority);
      }
    }
  }
}
/**
 * Method used to generate the Job table for Job pages.
 *
 * @param label display heading to be used in the job table.
 * @param jobs vector of jobs to be displayed in table.
 * @param refresh refresh interval to be used in jobdetails page.
 * @param rowId beginning row id to be used in the table.
 * @return HTML snippet for the job table.
 * @throws IOException
 */
// NOTE(review): the HTML string literals in this method are truncated
// (unterminated quotes below) and most of the row-generation body is
// missing -- this copy of the file appears to have had HTML-tag-like spans
// stripped out. The method is not compilable as-is; restore the bodies from
// the upstream Hadoop JSPUtil source before use.
public static String generateJobTable(String label, Collection jobs
, int refresh, int rowId) throws IOException {
// Only the "Running" table exposes kill/priority controls, and only when
// private actions are enabled.
boolean isRunning = label.equals("Running");
boolean isModifiable =
isRunning && conf.getBoolean(PRIVATE_ACTIONS_KEY, false);
StringBuffer sb = new StringBuffer();
// Garbled: was presumably the opening <table ...> markup.
sb.append("
\n");
if (jobs.size() > 0) {
if (isModifiable) {
// Garbled: was presumably the per-job rows / closing markup.
sb.append("
\n");
return sb.toString();
}
/**
 * Given jobId, resolve the link to jobdetailshistory.jsp.
 *
 * @param tracker JobTracker used to look up the retired-job record.
 * @param jobId job id as a String.
 * @return the link to jobdetailshistory.jsp for the job, or the empty
 *         String when no history file URL can be produced.
 */
public static String getJobDetailsHistoryLink(JobTracker tracker,
    String jobId) {
  RetireJobInfo retired = tracker.retireJobs.get(JobID.forName(jobId));
  String encodedHistoryUrl = getHistoryFileUrl(retired);
  if (encodedHistoryUrl == null) {
    // No history file available (or encoding failed) -- render no link.
    return "";
  }
  return "jobdetailshistory.jsp?jobid=" + jobId + "&logFile="
      + encodedHistoryUrl;
}
/**
 * Given jobId and taskid, resolve the link to taskdetailshistory.jsp.
 *
 * @param tracker JobTracker used to look up the retired-job record.
 * @param jobId job id as a String.
 * @param tid task id as a String.
 * @return the link to taskdetailshistory.jsp for the task, or the empty
 *         String when no history file URL can be produced.
 */
public static String getTaskDetailsHistoryLink(JobTracker tracker,
    String jobId,
    String tid) {
  RetireJobInfo retired = tracker.retireJobs.get(JobID.forName(jobId));
  String encodedHistoryUrl = getHistoryFileUrl(retired);
  if (encodedHistoryUrl == null) {
    // No history file available (or encoding failed) -- render no link.
    return "";
  }
  return "taskdetailshistory.jsp?jobid=" + jobId + "&logFile="
      + encodedHistoryUrl + "&taskid=" + tid;
}
/**
 * Obtain the URL-encoded history file path from a RetireJobInfo.
 *
 * @param info RetireJobInfo for the retired job.
 * @return the UTF-8 URL-encoded history file path, or null when the job has
 *         no history file or encoding fails.
 */
private static String getHistoryFileUrl(RetireJobInfo info) {
  String historyFile = info.getHistoryFile();
  String historyFileUrl = null;
  if (historyFile != null && !historyFile.isEmpty()) {
    try {
      // Encode the value we already read and validated above. (The original
      // re-invoked info.getHistoryFile() here, which was redundant and could
      // diverge from the checked value if the field changed between calls.)
      historyFileUrl = URLEncoder.encode(historyFile, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      // UTF-8 support is mandated by the JVM spec, so this is effectively
      // unreachable; log and fall through to returning null.
      LOG.warn("Can't create history url ", e);
    }
  }
  return historyFileUrl;
}
/**
 * Generates the HTML table of retired jobs (most recent first, capped at
 * 100 rows) for the JobTracker web UI.
 *
 * @param tracker JobTracker whose retired-job registry is listed.
 * @param rowId beginning row id to be used in the table.
 * @return HTML snippet for the retired-jobs table.
 * @throws IOException
 */
// NOTE(review): the HTML string literals below are truncated (unterminated
// quotes), and `Iterator` was presumably Iterator<RetireJobInfo> before the
// generic parameters were stripped from this copy. The method is not
// compilable as-is; restore from the upstream Hadoop JSPUtil source.
@SuppressWarnings("unchecked")
public static String generateRetiredJobTable(JobTracker tracker, int rowId)
throws IOException {
StringBuffer sb = new StringBuffer();
// Garbled: opening <table ...> markup.
sb.append("
\n");
// Iterate retired jobs newest-first.
Iterator iterator =
tracker.retireJobs.getAll().descendingIterator();
if (!iterator.hasNext()) {
// Garbled: "none" placeholder row.
sb.append("
none" +
"
\n");
} else {
// Garbled: header row cells (Jobid, Priority, User, Name, State, times,
// completion percentages, scheduling info).
sb.append("
");
sb.append("
Jobid
");
sb.append("
Priority
");
sb.append("
User
");
sb.append("
Name
");
sb.append("
State
");
sb.append("
Start Time
");
sb.append("
Finish Time
");
sb.append("
Map % Complete
");
sb.append("
Reduce % Complete
");
sb.append("
Job Scheduling Information
");
sb.append("
\n");
// Emit at most 100 rows, newest retired jobs first.
for (int i = 0; i < 100 && iterator.hasNext(); i++) {
RetireJobInfo info = iterator.next();
String historyFileUrl = getHistoryFileUrl(info);
// Garbled: per-job row markup (was presumably built from `info` and
// `historyFileUrl`); most of the loop body is missing.
sb.append("
\n");
return sb.toString();
}
/**
 * Method used to generate the Job table for Job pages with resource
 * utilization information obtained from {@link ResourceReporter}. Falls
 * back to {@link #generateJobTable} when no reporter is configured.
 *
 * @param label display heading to be used in the job table.
 * @param jobs vector of jobs to be displayed in table.
 * @param refresh refresh interval to be used in jobdetails page.
 * @param rowId beginning row id to be used in the table.
 * @return HTML snippet for the job table.
 * @throws IOException
 */
// NOTE(review): as with generateJobTable above, the HTML string literals
// below are truncated (unterminated quotes) and most of the body is
// missing; not compilable as-is. Restore from upstream Hadoop JSPUtil.
public static String generateJobTableWithResourceInfo(String label,
Collection jobs, int refresh, int rowId,
JobTracker tracker) throws IOException {
ResourceReporter reporter = tracker.getResourceReporter();
if (reporter == null) {
// No resource reporter configured: render the plain job table instead.
return generateJobTable(label, jobs, refresh, rowId);
}
// Only the "Running" table exposes kill/priority controls, and only when
// private actions are enabled.
boolean isRunning = label.equals("Running");
boolean isModifiable =
isRunning && conf.getBoolean(PRIVATE_ACTIONS_KEY, false);
StringBuffer sb = new StringBuffer();
// Garbled: opening <table ...> markup.
sb.append("
\n");
if (jobs.size() > 0) {
if (isModifiable) {
// Garbled: per-job rows / closing markup.
sb.append("
\n");
return sb.toString();
}
/**
 * Method used to generate the txt based Job table for Job pages.
 *
 * <p>Emits a fixed 27-column header line followed by one row per job.
 * Columns are tab-separated and rows newline-separated; the job name has
 * spaces/tabs/newlines replaced with '_' so it cannot corrupt the format.
 * Resource columns (21-27) are zero when no {@link ResourceReporter} is
 * configured on the tracker.
 *
 * @param jobs jobs to be displayed in the table.
 * @param tracker JobTracker used to look up the optional ResourceReporter.
 * @return a String containing the table.
 * @throws IOException declared for JSP callers; not thrown directly here.
 */
public static String generateTxtJobTable(Collection<JobInProgress> jobs,
    JobTracker tracker) throws IOException {
  // (Fixed: the generic parameter on Collection/its iterator had been
  // stripped, which made the raw it.next() -> JobInProgress assignment a
  // compile error.)
  char colSeparator = '\t';
  char rowSeparator = '\n';
  StringBuilder sb = new StringBuilder();
  // Header row: numbered column names make the output self-describing.
  sb.append("01.JOBID" + colSeparator +
      "02.START" + colSeparator +
      "03.FINISH" + colSeparator +
      "04.USER" + colSeparator +
      "05.NAME" + colSeparator +
      "06.BLACK_TT" + colSeparator +
      "07.PRIORITY" + colSeparator +
      "08.MAP_TOTAL" + colSeparator +
      "09.MAP_COMPLETE" + colSeparator +
      "10.MAP_RUN" + colSeparator +
      "11.MAP_SPECU" + colSeparator +
      "12.MAP_NONLOC" + colSeparator +
      "13.MAP_KILLED" + colSeparator +
      "14.MAP_FAILED" + colSeparator +
      "15.RED_TOTAL" + colSeparator +
      "16.RED_COMPLETE" + colSeparator +
      "17.RED_RUN" + colSeparator +
      "18.RED_SPECU" + colSeparator +
      "19.RED_KILLED" + colSeparator +
      "20.RED_FAILED" + colSeparator +
      "21.%MEM" + colSeparator +
      "22.%MEM_MAX" + colSeparator +
      "23.%MEM_PEAK" + colSeparator +
      "24.MEM_MS" + colSeparator +
      "25.%CPU" + colSeparator +
      "26.%CPU_MAX" + colSeparator +
      "27.CPU_MS" + rowSeparator);
  for (JobInProgress job : jobs) {
    JobProfile profile = job.getProfile();
    String user = profile.getUser();
    // Sanitize the user-supplied name so embedded separators cannot break
    // the tab/newline table format.
    String name = profile.getJobName().
        replace(' ', '_').replace('\t', '_').replace('\n', '_');
    int desiredMaps = job.desiredMaps();
    int desiredReduces = job.desiredReduces();

    // Tally running/failed/killed map attempts over the currently running
    // TIPs only (finished TIPs are covered by the completed counters).
    // (Fixed: removed a leftover tip.numKilledTasks() call whose result was
    // discarded.)
    int runningMaps = 0;
    int failedMaps = 0;
    int killedMaps = 0;
    for (TaskInProgress tip : job.getTasks(TaskType.MAP)) {
      if (tip.isRunning()) {
        runningMaps += tip.getActiveTasks().size();
        failedMaps += tip.numTaskFailures();
        killedMaps += tip.numKilledTasks();
      }
    }
    int runningReduces = 0;
    int failedReduces = 0;
    int killedReduces = 0;
    for (TaskInProgress tip : job.getTasks(TaskType.REDUCE)) {
      if (tip.isRunning()) {
        runningReduces += tip.getActiveTasks().size();
        failedReduces += tip.numTaskFailures();
        killedReduces += tip.numKilledTasks();
      }
    }
    int completedMaps = job.finishedMaps();
    int completedReduces = job.finishedReduces();
    int nonLocalRunningMaps = job.getNonLocalRunningMaps().size();
    long submitTime = job.getStartTime();
    long finishTime = job.getFinishTime();
    String jobpri = job.getPriority().toString();
    JobID jobId = job.getJobID();

    // Resource columns default to 0 when no reporter is configured.
    double mem = 0, memMax = 0, memMaxPeak = 0, memCost = 0;
    double cpu = 0, cpuMax = 0, cpuCost = 0;
    ResourceReporter reporter = tracker.getResourceReporter();
    if (reporter != null) {
      // NOTE(review): `mem` is taken from getJobCpuPercentageOnCluster --
      // the same accessor used for `cpu` below. This looks like a
      // copy-paste slip (a mem-percentage accessor was likely intended);
      // kept as-is pending confirmation against ResourceReporter's API.
      mem = reporter.getJobCpuPercentageOnCluster(jobId);
      memMax = reporter.getJobMemMaxPercentageOnBox(jobId);
      memMaxPeak = reporter.getJobMemMaxPercentageOnBoxAllTime(jobId);
      memCost = reporter.getJobMemCumulatedUsageTime(jobId);
      cpu = reporter.getJobCpuPercentageOnCluster(jobId);
      cpuMax = reporter.getJobCpuMaxPercentageOnBox(jobId);
      cpuCost = reporter.getJobCpuCumulatedUsageTime(jobId);
    }
    // One row per job, column order matching the header above.
    sb.append(jobId.toString() + colSeparator +
        submitTime + colSeparator +
        finishTime + colSeparator +
        user + colSeparator +
        name + colSeparator +
        job.getNoOfBlackListedTrackers() + colSeparator +
        jobpri + colSeparator +
        desiredMaps + colSeparator +
        completedMaps + colSeparator +
        runningMaps + colSeparator +
        job.speculativeMapTasks + colSeparator +
        nonLocalRunningMaps + colSeparator +
        killedMaps + colSeparator +
        failedMaps + colSeparator +
        desiredReduces + colSeparator +
        completedReduces + colSeparator +
        runningReduces + colSeparator +
        job.speculativeReduceTasks + colSeparator +
        killedReduces + colSeparator +
        failedReduces + colSeparator +
        mem + colSeparator +
        memMax + colSeparator +
        memMaxPeak + colSeparator +
        memCost + colSeparator +
        cpu + colSeparator +
        cpuMax + colSeparator +
        cpuCost + rowSeparator);
  }
  return sb.toString();
}
}