%@ page
contentType="text/html; charset=UTF-8"
import="javax.servlet.http.*"
import="java.io.*"
import="java.util.*"
import="org.apache.hadoop.fs.*"
import="org.apache.hadoop.mapred.*"
import="org.apache.hadoop.util.*"
import="java.text.SimpleDateFormat"
import="org.apache.hadoop.mapred.JobHistory.*"
%>
"/>
"/>
<%! // Formatter for all timestamps rendered on this page.
    // NOTE(review): SimpleDateFormat is not thread-safe, and this static
    // instance is shared by every concurrent request hitting this JSP —
    // formatted output can be corrupted under load. Confirm whether a
    // per-request instance (or ThreadLocal) is needed; the signature of
    // StringUtils.getFormattedTimeWithDiff pins the type to SimpleDateFormat.
    static SimpleDateFormat dateFormat = new SimpleDateFormat("d-MMM-yyyy HH:mm:ss") ; %>
<%
// Request parameters: the job id and the filesystem path of its history log.
String jobid = request.getParameter("jobid");
String logFile = request.getParameter("logFile");
// URL-safe form of the log path; presumably consumed by links emitted later
// in this page — TODO confirm (not referenced in this chunk).
String encodedLogFileName = JobHistory.JobInfo.encodeJobHistoryFilePath(logFile);
Path jobFile = new Path(logFile);
// History file names appear to be "_"-separated with the first two fields
// qualifying the job id (e.g. tracker host and start time) — verify against
// the JobHistory file-naming convention.
String[] jobDetails = jobFile.getName().split("_");
String jobUniqueString = jobDetails[0] + "_" +jobDetails[1] + "_" + jobid ;
// The parsed job info and FileSystem handle are expected to have been cached
// in the HTTP session by the page that loaded this history file.
JobInfo job = (JobInfo)request.getSession().getAttribute("job");
FileSystem fs = (FileSystem)request.getSession().getAttribute("fs");
%>
User: <%=job.get(Keys.USER) %>
JobName: <%=job.get(Keys.JOBNAME) %>
JobConf:
<%=job.get(Keys.JOBCONF) %>
Submitted At: <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.SUBMIT_TIME), 0 ) %>
Launched At: <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.LAUNCH_TIME), job.getLong(Keys.SUBMIT_TIME)) %>
Finished At: <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.FINISH_TIME), job.getLong(Keys.LAUNCH_TIME)) %>
<%-- FIX: the status used to be compared with "==", which tests String
     reference identity and only works by accident of interning; use
     equals() so an empty status reliably renders as "Incomplete". --%>
Status: <%= ("".equals(job.get(Keys.JOB_STATUS)) ? "Incomplete" : job.get(Keys.JOB_STATUS)) %>
<%
// Walk every task attempt once, accumulating per-phase statistics:
//  - earliest start / latest finish time for the map and reduce phases,
//  - total attempt counts and FAILED/KILLED tallies per phase,
//  - the set of hosts that ran any task (TreeMap keeps hostnames sorted).
// NOTE(review): the declarations below arrived as raw "Map"/"TreeMap", which
// cannot compile against the typed for-each loops that follow — the generic
// parameters were evidently lost in transit and are restored here.
Map<String, JobHistory.Task> tasks = job.getAllTasks();
int totalMaps = 0;
int totalReduces = 0;
int numFailedMaps = 0;
int numKilledMaps = 0;
int numFailedReduces = 0;
int numKilledReduces = 0;
long mapStarted = 0;
long mapFinished = 0;
long reduceStarted = 0;
long reduceFinished = 0;
Map<String, String> allHosts = new TreeMap<String, String>();
for (JobHistory.Task task : tasks.values()) {
  Map<String, TaskAttempt> attempts = task.getTaskAttempts();
  allHosts.put(task.get(Keys.HOSTNAME), "");
  for (TaskAttempt attempt : attempts.values()) {
    long startTime = attempt.getLong(Keys.START_TIME);
    long finishTime = attempt.getLong(Keys.FINISH_TIME);
    // Look the status up once per attempt instead of twice per branch.
    String status = attempt.get(Keys.TASK_STATUS);
    if (Values.MAP.name().equals(task.get(Keys.TASK_TYPE))) {
      // Widen the overall map-phase window (startTime 0 means "unset").
      if (mapStarted == 0 || mapStarted > startTime) {
        mapStarted = startTime;
      }
      if (mapFinished < finishTime) {
        mapFinished = finishTime;
      }
      totalMaps++;
      if (Values.FAILED.name().equals(status)) {
        numFailedMaps++;
      } else if (Values.KILLED.name().equals(status)) {
        numKilledMaps++;
      }
    } else {
      // Every non-map attempt is counted toward the reduce phase.
      if (reduceStarted == 0 || reduceStarted > startTime) {
        reduceStarted = startTime;
      }
      if (reduceFinished < finishTime) {
        reduceFinished = finishTime;
      }
      totalReduces++;
      if (Values.FAILED.name().equals(status)) {
        numFailedReduces++;
      } else if (Values.KILLED.name().equals(status)) {
        numKilledReduces++;
      }
    }
  }
}
%>
Analyse This Job
<%
// Re-parse the history log collecting hostname -> set of FAILED task
// attempt ids, then render one table row per affected host.
// NOTE(review): the map/entry/set declarations arrived as raw types
// ("Map>", "Map.Entry>", "Set") — generic parameters restored to
// Map<String, Set<String>>, which the typed loops below require.
DefaultJobHistoryParser.FailedOnNodesFilter filter =
  new DefaultJobHistoryParser.FailedOnNodesFilter();
JobHistory.parseHistoryFromFS(logFile, filter, fs);
Map<String, Set<String>> badNodes = filter.getValues();
if (badNodes.size() > 0) {
%>
Failed tasks attempts by nodes
Hostname | Failed Tasks |
<%
for (Map.Entry<String, Set<String>> entry : badNodes.entrySet()) {
  String node = entry.getKey();
  Set<String> failedTasks = entry.getValue();
%>
<%=node %> |
<%
  for (String t : failedTasks) {
%>
<%=t %>,
<%
  }
%>
|
<%
}
}
%>
<%
// Re-parse the history log collecting hostname -> set of KILLED task
// attempt ids, then render one table row per affected host.
DefaultJobHistoryParser.KilledOnNodesFilter killedFilter =
  new DefaultJobHistoryParser.KilledOnNodesFilter();
// BUG FIX: this pass previously re-parsed the log with the *failed* filter
// ("filter"), so killedFilter was never populated and the killed-tasks
// table was always empty. Parse with killedFilter instead.
JobHistory.parseHistoryFromFS(logFile, killedFilter, fs);
badNodes = killedFilter.getValues();
if (badNodes.size() > 0) {
%>
Killed tasks attempts by nodes
Hostname | Killed Tasks |
<%
for (Map.Entry<String, Set<String>> entry : badNodes.entrySet()) {
  String node = entry.getKey();
  Set<String> killedTasks = entry.getValue();
%>
<%=node %> |
<%
  for (String t : killedTasks) {
%>
<%=t %>,
<%
  }
%>
|
<%
}
}
%>