
// NOTE(review): the lines below were non-Java residue from the Maven-repository
// web page this file was extracted from; kept as a comment so the file parses.
//   soot.toolkits.scalar.FlowAnalysis  Maven / Gradle / Ivy
//   "A Java Optimization Framework" — artifact: soot (all versions / documentation)
package soot.toolkits.scalar;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1997 - 1999 Raja Vallee-Rai
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.RandomAccess;
import java.util.Set;
import soot.baf.GotoInst;
import soot.jimple.GotoStmt;
import soot.options.Options;
import soot.toolkits.graph.DirectedGraph;
import soot.toolkits.graph.interaction.FlowInfo;
import soot.toolkits.graph.interaction.InteractionHandler;
import soot.util.Numberable;
import soot.util.PriorityQueue;
/**
* An abstract class providing a framework for carrying out dataflow analysis. Subclassing either BackwardFlowAnalysis or
* ForwardFlowAnalysis and providing implementations for the abstract methods will allow Soot to compute the corresponding
* flow analysis.
*/
public abstract class FlowAnalysis extends AbstractFlowAnalysis {
public enum Flow {
IN {
@Override
F getFlow(Entry, F> e) {
return e.inFlow;
}
},
OUT {
@Override
F getFlow(Entry, F> e) {
return e.outFlow;
}
};
abstract F getFlow(Entry, F> e);
}
static class Entry implements Numberable {
final D data;
int number;
/**
* This Entry is part of a real scc.
*/
boolean isRealStronglyConnected;
Entry[] in;
Entry[] out;
F inFlow;
F outFlow;
@SuppressWarnings("unchecked")
Entry(D u, Entry pred) {
in = new Entry[] { pred };
data = u;
number = Integer.MIN_VALUE;
isRealStronglyConnected = false;
}
@Override
public String toString() {
return data == null ? "" : data.toString();
}
@Override
public void setNumber(int n) {
number = n;
}
@Override
public int getNumber() {
return number;
}
}
static enum Orderer {
INSTANCE;
/**
* Creates a new {@code Entry} graph based on a {@code DirectedGraph}. This includes pseudo topological order, local
* access for predecessors and successors, a graph entry-point, a {@code Numberable} interface and a real strongly
* connected component marker.
*
* @param g
* @param gv
* @param entryFlow
* @return
*/
List> newUniverse(DirectedGraph g, GraphView gv, F entryFlow, boolean isForward) {
final int n = g.size();
Deque> s = new ArrayDeque>(n);
List> universe = new ArrayList>(n);
Map> visited = new HashMap>(((n + 1) * 4) / 3);
// out of universe node
Entry superEntry = new Entry(null, null);
List entries = null;
List actualEntries = gv.getEntries(g);
if (!actualEntries.isEmpty()) {
// normal cases: there is at least
// one return statement for a backward analysis
// or one entry statement for a forward analysis
entries = actualEntries;
} else {
// cases without any entry statement
if (isForward) {
// case of a forward flow analysis on
// a method without any entry point
throw new RuntimeException("error: no entry point for method in forward analysis");
} else {
// case of backward analysis on
// a method which potentially has
// an infinite loop and no return statement
entries = new ArrayList();
// a single head is expected
assert g.getHeads().size() == 1;
D head = g.getHeads().get(0);
Set visitedNodes = new HashSet();
List workList = new ArrayList();
D current = null;
// collect all 'goto' statements to catch the 'goto'
// from the infinite loop
workList.add(head);
while (!workList.isEmpty()) {
current = workList.remove(0);
visitedNodes.add(current);
// only add 'goto' statements
if (current instanceof GotoInst || current instanceof GotoStmt) {
entries.add(current);
}
for (D next : g.getSuccsOf(current)) {
if (visitedNodes.contains(next)) {
continue;
}
workList.add(next);
}
}
//
if (entries.isEmpty()) {
throw new RuntimeException("error: backward analysis on an empty entry set.");
}
}
}
visitEntry(visited, superEntry, entries);
superEntry.inFlow = entryFlow;
superEntry.outFlow = entryFlow;
@SuppressWarnings("unchecked")
Entry[] sv = new Entry[g.size()];
int[] si = new int[g.size()];
int index = 0;
int i = 0;
Entry v = superEntry;
for (;;) {
if (i < v.out.length) {
Entry w = v.out[i++];
// an unvisited child node
if (w.number == Integer.MIN_VALUE) {
w.number = s.size();
s.add(w);
visitEntry(visited, w, gv.getOut(g, w.data));
// save old
si[index] = i;
sv[index] = v;
index++;
i = 0;
v = w;
}
} else {
if (index == 0) {
assert universe.size() <= g.size();
Collections.reverse(universe);
return universe;
}
universe.add(v);
sccPop(s, v);
// restore old
index--;
v = sv[index];
i = si[index];
}
}
}
@SuppressWarnings("unchecked")
private Entry[] visitEntry(Map> visited, Entry v, List out) {
int n = out.size();
Entry[] a = new Entry[n];
assert (out instanceof RandomAccess);
for (int i = 0; i < n; i++) {
a[i] = getEntryOf(visited, out.get(i), v);
}
return v.out = a;
}
private Entry getEntryOf(Map> visited, D d, Entry v) {
// either we reach a new node or a merge node, the latter one is rare
// so put and restore should be better that a lookup
// putIfAbsent would be the ideal strategy
// add and restore if required
Entry newEntry = new Entry(d, v);
Entry oldEntry = visited.put(d, newEntry);
// no restore required
if (oldEntry == null) {
return newEntry;
}
// false prediction, restore the entry
visited.put(d, oldEntry);
// adding self ref (real strongly connected with itself)
if (oldEntry == v) {
oldEntry.isRealStronglyConnected = true;
}
// merge nodes are rare, so this is ok
int l = oldEntry.in.length;
oldEntry.in = Arrays.copyOf(oldEntry.in, l + 1);
oldEntry.in[l] = v;
return oldEntry;
}
private void sccPop(Deque> s, Entry v) {
int min = v.number;
for (Entry e : v.out) {
assert e.number > Integer.MIN_VALUE;
min = Math.min(min, e.number);
}
// not our SCC
if (min != v.number) {
v.number = min;
return;
}
// we only want real SCCs (size > 1)
Entry w = s.removeLast();
w.number = Integer.MAX_VALUE;
if (w == v) {
return;
}
w.isRealStronglyConnected = true;
for (;;) {
w = s.removeLast();
assert w.number >= v.number;
w.isRealStronglyConnected = true;
w.number = Integer.MAX_VALUE;
if (w == v) {
assert w.in.length >= 2;
return;
}
}
}
}
enum InteractionFlowHandler {
NONE, FORWARD {
@Override
public void handleFlowIn(FlowAnalysis a, N s) {
beforeEvent(stop(s), a, s);
}
@Override
public void handleFlowOut(FlowAnalysis a, N s) {
afterEvent(InteractionHandler.v(), a, s);
}
},
BACKWARD {
@Override
public void handleFlowIn(FlowAnalysis a, N s) {
afterEvent(stop(s), a, s);
}
@Override
public void handleFlowOut(FlowAnalysis a, N s) {
beforeEvent(InteractionHandler.v(), a, s);
}
};
void beforeEvent(InteractionHandler i, FlowAnalysis a, N s) {
A savedFlow = a.filterUnitToBeforeFlow.get(s);
if (savedFlow == null) {
savedFlow = a.newInitialFlow();
}
a.copy(a.unitToBeforeFlow.get(s), savedFlow);
i.handleBeforeAnalysisEvent(new FlowInfo(savedFlow, s, true));
}
void afterEvent(InteractionHandler i, FlowAnalysis a, N s) {
A savedFlow = a.filterUnitToAfterFlow.get(s);
if (savedFlow == null) {
savedFlow = a.newInitialFlow();
}
a.copy(a.unitToAfterFlow.get(s), savedFlow);
i.handleAfterAnalysisEvent(new FlowInfo(savedFlow, s, false));
}
InteractionHandler stop(Object s) {
InteractionHandler h = InteractionHandler.v();
List> stopList = h.getStopUnitList();
if (stopList != null && stopList.contains(s)) {
h.handleStopAtNodeEvent(s);
}
return h;
}
public void handleFlowIn(FlowAnalysis a, N s) {
}
public void handleFlowOut(FlowAnalysis a, N s) {
}
}
enum GraphView {
BACKWARD {
@Override
List getEntries(DirectedGraph g) {
return g.getTails();
}
@Override
List getOut(DirectedGraph g, N s) {
return g.getPredsOf(s);
}
},
FORWARD {
@Override
List getEntries(DirectedGraph g) {
return g.getHeads();
}
@Override
List getOut(DirectedGraph g, N s) {
return g.getSuccsOf(s);
}
};
abstract List getEntries(DirectedGraph g);
abstract List getOut(DirectedGraph g, N s);
}
/** Maps graph nodes to OUT sets. */
protected Map unitToAfterFlow;
/** Filtered: Maps graph nodes to OUT sets. */
protected Map filterUnitToAfterFlow = Collections.emptyMap();
/** Constructs a flow analysis on the given DirectedGraph
. */
public FlowAnalysis(DirectedGraph graph) {
super(graph);
unitToAfterFlow = new IdentityHashMap(graph.size() * 2 + 1);
}
/**
* Given the merge of the out
sets, compute the in
set for s
(or in to out,
* depending on direction).
*
* This function often causes confusion, because the same interface is used for both forward and backward flow analyses.
* The first parameter is always the argument to the flow function (i.e. it is the "in" set in a forward analysis and the
* "out" set in a backward analysis), and the third parameter is always the result of the flow function (i.e. it is the
* "out" set in a forward analysis and the "in" set in a backward analysis).
*
* @param in
* the input flow
* @param d
* the current node
* @param out
* the returned flow
**/
protected abstract void flowThrough(A in, N d, A out);
/** Accessor function returning value of OUT set for s. */
public A getFlowAfter(N s) {
A a = unitToAfterFlow.get(s);
return a == null ? newInitialFlow() : a;
}
@Override
public A getFlowBefore(N s) {
A a = unitToBeforeFlow.get(s);
return a == null ? newInitialFlow() : a;
}
private void initFlow(Iterable> universe, Map in, Map out) {
assert universe != null;
assert in != null;
assert out != null;
// If a node has only a single in-flow, the in-flow is always equal
// to the out-flow if its predecessor, so we use the same object.
// this saves memory and requires less object creation and copy calls.
// Furthermore a node can be marked as omissible, this allows us to use
// the same "flow-set" for out-flow and in-flow. A merge node with within
// a real scc cannot be omitted, as it could cause endless loops within
// the fixpoint-iteration!
for (Entry n : universe) {
boolean omit = true;
if (n.in.length > 1) {
n.inFlow = newInitialFlow();
// no merge points in loops
omit = !n.isRealStronglyConnected;
} else {
assert n.in.length == 1 : "missing superhead";
n.inFlow = getFlow(n.in[0], n);
assert n.inFlow != null : "topological order is broken";
}
if (omit && omissible(n.data)) {
// We could recalculate the graph itself but thats more expensive than
// just falling through such nodes.
n.outFlow = n.inFlow;
} else {
n.outFlow = newInitialFlow();
}
// for legacy api
in.put(n.data, n.inFlow);
out.put(n.data, n.outFlow);
}
}
/**
* If a flow node can be omitted return true
, otherwise false
. There is no guarantee a node will
* be omitted. A omissible node does not influence the result of an analysis.
*
* If you are unsure, don't overwrite this method
*
* @param n
* the node to check
* @return false
*/
protected boolean omissible(N n) {
return false;
}
/**
* You can specify which flow set you would like to use of node {@code from}
*
* @param from
* @param mergeNode
* @return Flow.OUT
*/
protected Flow getFlow(N from, N mergeNode) {
return Flow.OUT;
}
private A getFlow(Entry o, Entry e) {
return (o.inFlow == o.outFlow) ? o.outFlow : getFlow(o.data, e.data).getFlow(o);
}
private void meetFlows(Entry e) {
assert e.in.length >= 1;
if (e.in.length > 1) {
boolean copy = true;
for (Entry o : e.in) {
A flow = getFlow(o, e);
if (copy) {
copy = false;
copy(flow, e.inFlow);
} else {
mergeInto(e.data, e.inFlow, flow);
}
}
}
}
final int doAnalysis(GraphView gv, InteractionFlowHandler ifh, Map inFlow, Map outFlow) {
assert gv != null;
assert ifh != null;
ifh = Options.v().interactive_mode() ? ifh : InteractionFlowHandler.NONE;
final List> universe = Orderer.INSTANCE.newUniverse(graph, gv, entryInitialFlow(), isForward());
initFlow(universe, inFlow, outFlow);
Queue> q = PriorityQueue.of(universe, true);
// Perform fixed point flow analysis
for (int numComputations = 0;; numComputations++) {
Entry e = q.poll();
if (e == null) {
return numComputations;
}
meetFlows(e);
// Compute beforeFlow and store it.
ifh.handleFlowIn(this, e.data);
boolean hasChanged = flowThrough(e);
ifh.handleFlowOut(this, e.data);
// Update queue appropriately
if (hasChanged) {
q.addAll(Arrays.asList(e.out));
}
}
}
private boolean flowThrough(Entry d) {
// omitted, just fall through
if (d.inFlow == d.outFlow) {
assert !d.isRealStronglyConnected || d.in.length == 1;
return true;
}
if (d.isRealStronglyConnected) {
// A flow node that is influenced by at least one back-reference.
// It's essential to check if "flowThrough" changes the result.
// This requires the calculation of "equals", which itself
// can be really expensive - depending on the used flow-model.
// Depending on the "merge"+"flowThrough" costs, it can be cheaper
// to fall through. Only nodes with real back-references always
// need to be checked for changes
A out = newInitialFlow();
flowThrough(d.inFlow, d.data, out);
if (out.equals(d.outFlow)) {
return false;
}
// copy back the result, as it has changed
copy(out, d.outFlow);
return true;
}
// no back-references, just calculate "flowThrough"
flowThrough(d.inFlow, d.data, d.outFlow);
return true;
}
}
// NOTE(review): site footer from the scraped web page, kept as a comment:
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy