/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.healthmanager.plugins.utils;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.healthmanager.RestServerClient;
import org.apache.flink.runtime.jobgraph.JobVertexID;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Utils to analyze topology structure of the job.
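 *
 * <p>A minimal usage sketch, assuming a {@code RestServerClient.JobConfig} has already been
 * obtained elsewhere (for example from the health manager's REST client):
 * <pre>{@code
 * JobTopologyAnalyzer analyzer = new JobTopologyAnalyzer();
 * analyzer.analyze(jobConfig);
 * for (JobVertexID root : analyzer.getAllSubDagRoots()) {
 *     List<JobVertexID> subDagVertices = analyzer.getSubDagVertices(root);
 * }
 * }</pre>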
 */
public class JobTopologyAnalyzer {

	private Map<JobVertexID, List<JobVertexID>> subDagRoot2SubDagVertex = new HashMap<>();
	private Map<JobVertexID, JobVertexID> vertex2SubDagRoot = new HashMap<>();

	private Map<JobVertexID, List<JobVertexID>> inputs = new HashMap<>();
	private Map<JobVertexID, List<JobVertexID>> outputs = new HashMap<>();

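	/**
	 * Analyzes the topology described by the given job config. Every vertex starts as its own
	 * sub-dag rooted at itself; a vertex with exactly one input is then merged into the sub-dag
	 * of its upstream vertex, so each remaining sub-dag is rooted at a vertex with either zero
	 * inputs or more than one input. Results of a previous analysis are discarded.
	 *
	 * @param jobConfig job configuration providing the vertices and their input edges
	 */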
	public void analyze(RestServerClient.JobConfig jobConfig) {

		subDagRoot2SubDagVertex = new HashMap<>();
		vertex2SubDagRoot = new HashMap<>();
		inputs = new HashMap<>();
		outputs = new HashMap<>();

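		// First pass: every vertex initially forms its own sub-dag, rooted at itself,
		// with empty input and output lists.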
		for (JobVertexID vertexId : jobConfig.getVertexConfigs().keySet()) {
			subDagRoot2SubDagVertex.put(vertexId, new ArrayList<>());
			subDagRoot2SubDagVertex.get(vertexId).add(vertexId);
			vertex2SubDagRoot.put(vertexId, vertexId);
			inputs.put(vertexId, new ArrayList<>());
			outputs.put(vertexId, new ArrayList<>());
		}

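		// Second pass: record the input/output adjacency of every vertex and merge each
		// vertex that has exactly one input into the sub-dag of its upstream vertex.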
		for (JobVertexID vertexId : jobConfig.getInputNodes().keySet()) {
			// Each input edge is a Tuple2 whose first field (f0) is the upstream vertex ID.
			// Only f0 is needed here, so the edge's second type parameter is left as a wildcard.
			List<? extends Tuple2<JobVertexID, ?>> upstreamVertices = jobConfig.getInputNodes().get(vertexId);

			for (Tuple2<JobVertexID, ?> entry : upstreamVertices) {
				inputs.get(vertexId).add(entry.f0);
				outputs.get(entry.f0).add(vertexId);
			}

			if (upstreamVertices.size() == 1) {
				// merge the sub-dag with the upstream one if the current vertex has only one input.
				JobVertexID upstreamVertex = upstreamVertices.get(0).f0;
				JobVertexID upstreamSubDagRoot = vertex2SubDagRoot.get(upstreamVertex);

				// add downstream sub dag vertices to upstream sub dag and remove downstream sub dag
				for (JobVertexID subDagVertex : subDagRoot2SubDagVertex.get(vertexId)) {
					subDagRoot2SubDagVertex.get(upstreamSubDagRoot).add(subDagVertex);
					vertex2SubDagRoot.put(subDagVertex, upstreamSubDagRoot);
				}
				subDagRoot2SubDagVertex.remove(vertexId);

			}
		}
	}

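	/**
	 * Checks whether the given vertex has no inputs, i.e. it is a source of the job.
	 */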
	public boolean isSource(JobVertexID vertexID) {
		return inputs.get(vertexID).isEmpty();
	}

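	/**
	 * Checks whether the given vertex has no outputs, i.e. it is a sink of the job.
	 */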
	public boolean isSink(JobVertexID vertexID) {
		return outputs.get(vertexID).isEmpty();
	}

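	/**
	 * Returns the direct upstream vertices of the given vertex.
	 */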
	public List<JobVertexID> getInputs(JobVertexID vertexID) {
		return inputs.get(vertexID);
	}

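	/**
	 * Returns the direct downstream vertices of the given vertex.
	 */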
	public List<JobVertexID> getOutputs(JobVertexID vertexID) {
		return outputs.get(vertexID);
	}

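	/**
	 * Returns the root vertex of the sub-dag that the given vertex belongs to.
	 */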
	public JobVertexID getSubDagRoot(JobVertexID vertexID) {
		return vertex2SubDagRoot.get(vertexID);
	}

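	/**
	 * Returns all vertices of the sub-dag that the given vertex belongs to, including its root.
	 */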
	public List<JobVertexID> getSubDagVertices(JobVertexID vertexID) {
		return subDagRoot2SubDagVertex.get(vertex2SubDagRoot.get(vertexID));
	}

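	/**
	 * Returns the roots of all sub-dags identified by the last call to {@link #analyze}.
	 */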
	public Set<JobVertexID> getAllSubDagRoots() {
		return subDagRoot2SubDagVertex.keySet();
	}
}



