
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hyracks.control.cc.job;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
import org.apache.hyracks.api.dataflow.TaskId;
import org.apache.hyracks.api.dataflow.connectors.IConnectorPolicy;
import org.apache.hyracks.api.deployment.DeploymentId;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.api.job.ActivityCluster;
import org.apache.hyracks.api.job.ActivityClusterGraph;
import org.apache.hyracks.api.job.ActivityClusterId;
import org.apache.hyracks.api.job.IActivityClusterGraphGenerator;
import org.apache.hyracks.api.job.JobFlag;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobStatus;
import org.apache.hyracks.api.partitions.PartitionId;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.partitions.PartitionMatchMaker;
import org.apache.hyracks.control.cc.scheduler.ActivityPartitionDetails;
import org.apache.hyracks.control.cc.scheduler.JobScheduler;
import org.apache.hyracks.control.common.job.profiling.om.JobProfile;
import org.apache.hyracks.control.common.utils.ExceptionUtils;

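/**
 * Run-time state of a single Hyracks job as tracked by the cluster controller:
 * the activity cluster graph, the per-cluster plans, partition matching state,
 * participating and cleanup-pending node ids, status, timing, and the job
 * profile collected during execution.
 */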
public class JobRun implements IJobStatusConditionVariable {
    private final DeploymentId deploymentId;
    private final JobId jobId;
    private final IActivityClusterGraphGenerator acgg;
    private final ActivityClusterGraph acg;
    private final JobScheduler scheduler;
    private final EnumSet<JobFlag> jobFlags;
    private final Map<ActivityClusterId, ActivityClusterPlan> activityClusterPlanMap;
    private final PartitionMatchMaker pmm;
    private final Set<String> participatingNodeIds;
    private final Set<String> cleanupPendingNodeIds;
    private final JobProfile profile;
    private final Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicyMap;
    private long createTime;
    private long startTime;
    private long endTime;
    private JobStatus status;
    private List<Exception> exceptions;
    private JobStatus pendingStatus;
    private List<Exception> pendingExceptions;
    private Map<OperatorDescriptorId, Map<Integer, String>> operatorLocations;

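    /**
     * Builds the run state for a job: the activity cluster graph is obtained
     * from the generator, and the scheduler is created over the generator's
     * constraints.
     */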
    public JobRun(ClusterControllerService ccs, DeploymentId deploymentId, JobId jobId,
            IActivityClusterGraphGenerator acgg, EnumSet<JobFlag> jobFlags) {
        this.deploymentId = deploymentId;
        this.jobId = jobId;
        this.acgg = acgg;
        this.acg = acgg.initialize();
        this.scheduler = new JobScheduler(ccs, this, acgg.getConstraints());
        this.jobFlags = jobFlags;
        activityClusterPlanMap = new HashMap<>();
        pmm = new PartitionMatchMaker();
        participatingNodeIds = new HashSet<>();
        cleanupPendingNodeIds = new HashSet<>();
        profile = new JobProfile(jobId);
        connectorPolicyMap = new HashMap<>();
        operatorLocations = new HashMap<>();
    }

    public DeploymentId getDeploymentId() {
        return deploymentId;
    }

    public JobId getJobId() {
        return jobId;
    }

    public ActivityClusterGraph getActivityClusterGraph() {
        return acg;
    }

    public EnumSet<JobFlag> getFlags() {
        return jobFlags;
    }

    public Map<ActivityClusterId, ActivityClusterPlan> getActivityClusterPlanMap() {
        return activityClusterPlanMap;
    }

    public PartitionMatchMaker getPartitionMatchMaker() {
        return pmm;
    }

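    /**
     * Records the job's status and any associated exceptions, then wakes all
     * threads blocked in waitForCompletion().
     */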
    public synchronized void setStatus(JobStatus status, List<Exception> exceptions) {
        this.status = status;
        this.exceptions = exceptions;
        notifyAll();
    }

    public synchronized JobStatus getStatus() {
        return status;
    }

    public synchronized List<Exception> getExceptions() {
        return exceptions;
    }

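    /**
     * Stages a status and exceptions without waking waiters; compare
     * setStatus(), which publishes the final status via notifyAll().
     */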
    public void setPendingStatus(JobStatus status, List<Exception> exceptions) {
        this.pendingStatus = status;
        this.pendingExceptions = exceptions;
    }

    public JobStatus getPendingStatus() {
        return pendingStatus;
    }

    public synchronized List<Exception> getPendingExceptions() {
        return pendingExceptions;
    }

    public long getCreateTime() {
        return createTime;
    }

    public void setCreateTime(long createTime) {
        this.createTime = createTime;
    }

    public long getStartTime() {
        return startTime;
    }

    public void setStartTime(long startTime) {
        this.startTime = startTime;
    }

    public long getEndTime() {
        return endTime;
    }

    public void setEndTime(long endTime) {
        this.endTime = endTime;
    }

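    /**
     * Records the node id on which the given partition of an operator has
     * been placed.
     */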
    public void registerOperatorLocation(OperatorDescriptorId op, int partition, String location) {
        Map<Integer, String> locations = operatorLocations.get(op);
        if (locations == null) {
            locations = new HashMap<>();
            operatorLocations.put(op, locations);
        }
        locations.put(partition, location);
    }

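    /**
     * Blocks until the job reaches TERMINATED or FAILURE. On failure, the
     * first recorded exception becomes the cause of the thrown
     * HyracksException and the remaining ones are attached as suppressed
     * exceptions.
     */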
    @Override
    public synchronized void waitForCompletion() throws Exception {
        while (status != JobStatus.TERMINATED && status != JobStatus.FAILURE) {
            wait();
        }
        if (exceptions != null && !exceptions.isEmpty()) {
            StringBuilder buffer = new StringBuilder();
            buffer.append("Job failed on account of:\n");
            for (Exception e : exceptions) {
                buffer.append(e.getMessage()).append('\n');
            }
            HyracksException he = new HyracksException(buffer.toString(), exceptions.get(0));
            for (int i = 1; i < exceptions.size(); ++i) {
                he.addSuppressed(exceptions.get(i));
            }
            throw he;
        }
    }

    public Set<String> getParticipatingNodeIds() {
        return participatingNodeIds;
    }

    public Set<String> getCleanupPendingNodeIds() {
        return cleanupPendingNodeIds;
    }

    public JobProfile getJobProfile() {
        return profile;
    }

    public JobScheduler getScheduler() {
        return scheduler;
    }

    public Map<ConnectorDescriptorId, IConnectorPolicy> getConnectorPolicyMap() {
        return connectorPolicyMap;
    }

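    /**
     * Serializes this run for monitoring: job id, status, and timing, plus
     * one entry per activity cluster describing its activities, dependencies,
     * and plan (partition counts, tasks, task clusters, and execution
     * attempts), followed by the job profile.
     */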
    public JSONObject toJSON() throws JSONException {
        JSONObject result = new JSONObject();

        result.put("job-id", jobId.toString());
        result.put("status", getStatus());
        result.put("create-time", getCreateTime());
        result.put("start-time", getStartTime());
        result.put("end-time", getEndTime());

        JSONArray aClusters = new JSONArray();
        for (ActivityCluster ac : acg.getActivityClusterMap().values()) {
            JSONObject acJSON = new JSONObject();

            acJSON.put("activity-cluster-id", String.valueOf(ac.getId()));

            JSONArray activitiesJSON = new JSONArray();
            for (ActivityId aid : ac.getActivityMap().keySet()) {
                activitiesJSON.put(aid);
            }
            acJSON.put("activities", activitiesJSON);

            JSONArray dependenciesJSON = new JSONArray();
            for (ActivityCluster dependency : ac.getDependencies()) {
                dependenciesJSON.put(String.valueOf(dependency.getId()));
            }
            acJSON.put("dependencies", dependenciesJSON);

            ActivityClusterPlan acp = activityClusterPlanMap.get(ac.getId());
            if (acp == null) {
                acJSON.put("plan", (Object) null);
            } else {
                JSONObject planJSON = new JSONObject();

                JSONArray acTasks = new JSONArray();
                for (Map.Entry<ActivityId, ActivityPlan> e : acp.getActivityPlanMap().entrySet()) {
                    ActivityPlan acPlan = e.getValue();
                    JSONObject entry = new JSONObject();
                    entry.put("activity-id", e.getKey().toString());

                    ActivityPartitionDetails apd = acPlan.getActivityPartitionDetails();
                    entry.put("partition-count", apd.getPartitionCount());

                    JSONArray inPartCountsJSON = new JSONArray();
                    int[] inPartCounts = apd.getInputPartitionCounts();
                    if (inPartCounts != null) {
                        for (int i : inPartCounts) {
                            inPartCountsJSON.put(i);
                        }
                    }
                    entry.put("input-partition-counts", inPartCountsJSON);

                    JSONArray outPartCountsJSON = new JSONArray();
                    int[] outPartCounts = apd.getOutputPartitionCounts();
                    if (outPartCounts != null) {
                        for (int o : outPartCounts) {
                            outPartCountsJSON.put(o);
                        }
                    }
                    entry.put("output-partition-counts", outPartCountsJSON);

                    JSONArray tasks = new JSONArray();
                    for (Task t : acPlan.getTasks()) {
                        JSONObject task = new JSONObject();

                        task.put("task-id", t.getTaskId().toString());

                        JSONArray dependentTasksJSON = new JSONArray();
                        for (TaskId dependent : t.getDependents()) {
                            dependentTasksJSON.put(dependent.toString());
                        }
                        task.put("dependents", dependentTasksJSON);

                        JSONArray dependencyTasksJSON = new JSONArray();
                        for (TaskId dependency : t.getDependencies()) {
                            dependencyTasksJSON.put(dependency.toString());
                        }
                        task.put("dependencies", dependencyTasksJSON);

                        tasks.put(task);
                    }
                    entry.put("tasks", tasks);

                    acTasks.put(entry);
                }
                planJSON.put("activities", acTasks);

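                // Task clusters: for each cluster, record its tasks, the partitions
                // it produces and requires, and the history of execution attempts
                // with per-task-attempt status, node, timing, and failure details.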
                JSONArray tClusters = new JSONArray();
                for (TaskCluster tc : acp.getTaskClusters()) {
                    JSONObject c = new JSONObject();
                    c.put("task-cluster-id", String.valueOf(tc.getTaskClusterId()));

                    JSONArray tasks = new JSONArray();
                    for (Task t : tc.getTasks()) {
                        tasks.put(t.getTaskId().toString());
                    }
                    c.put("tasks", tasks);

                    JSONArray prodParts = new JSONArray();
                    for (PartitionId p : tc.getProducedPartitions()) {
                        prodParts.put(p.toString());
                    }
                    c.put("produced-partitions", prodParts);

                    JSONArray reqdParts = new JSONArray();
                    for (PartitionId p : tc.getRequiredPartitions()) {
                        reqdParts.put(p.toString());
                    }
                    c.put("required-partitions", reqdParts);

                    JSONArray attempts = new JSONArray();
                    List<TaskClusterAttempt> tcAttempts = tc.getAttempts();
                    if (tcAttempts != null) {
                        for (TaskClusterAttempt tca : tcAttempts) {
                            JSONObject attempt = new JSONObject();
                            attempt.put("attempt", tca.getAttempt());
                            attempt.put("status", tca.getStatus());
                            attempt.put("start-time", tca.getStartTime());
                            attempt.put("end-time", tca.getEndTime());

                            JSONArray taskAttempts = new JSONArray();
                            for (TaskAttempt ta : tca.getTaskAttempts().values()) {
                                JSONObject taskAttempt = new JSONObject();
                                taskAttempt.put("task-id", ta.getTaskAttemptId().getTaskId());
                                taskAttempt.put("task-attempt-id", ta.getTaskAttemptId());
                                taskAttempt.put("status", ta.getStatus());
                                taskAttempt.put("node-id", ta.getNodeId());
                                taskAttempt.put("start-time", ta.getStartTime());
                                taskAttempt.put("end-time", ta.getEndTime());
                                List<Exception> exceptions = ta.getExceptions();
                                if (exceptions != null && !exceptions.isEmpty()) {
                                    List<Exception> filteredExceptions = ExceptionUtils.getActualExceptions(exceptions);
                                    for (Exception exception : filteredExceptions) {
                                        StringWriter exceptionWriter = new StringWriter();
                                        exception.printStackTrace(new PrintWriter(exceptionWriter));
                                        taskAttempt.put("failure-details", exceptionWriter.toString());
                                    }
                                }
                                taskAttempts.put(taskAttempt);
                            }
                            attempt.put("task-attempts", taskAttempts);

                            attempts.put(attempt);
                        }
                    }
                    c.put("attempts", attempts);

                    tClusters.put(c);
                }
                planJSON.put("task-clusters", tClusters);

                acJSON.put("plan", planJSON);
            }

            aClusters.put(acJSON);
        }
        result.put("activity-clusters", aClusters);

        result.put("profile", profile.toJSON());

        return result;
    }

    public Map<OperatorDescriptorId, Map<Integer, String>> getOperatorLocations() {
        return operatorLocations;
    }
}