From 5f5b1508b74d989403b3708dd4d51aed991e0006 Mon Sep 17 00:00:00 2001 From: jjenista Date: Wed, 2 Dec 2009 23:14:44 +0000 Subject: [PATCH] start of a revised disjoint reachability analysis --- Robust/src/Analysis/Disjoint/AllocSite.java | 209 + Robust/src/Analysis/Disjoint/Canonical.java | 27 + .../Analysis/Disjoint/CanonicalWrapper.java | 18 + Robust/src/Analysis/Disjoint/ChangeSet.java | 109 + Robust/src/Analysis/Disjoint/ChangeTuple.java | 76 + .../Analysis/Disjoint/DisjointAnalysis.java | 1077 ++++ .../src/Analysis/Disjoint/HeapRegionNode.java | 234 + Robust/src/Analysis/Disjoint/ReachGraph.java | 4926 +++++++++++++++++ Robust/src/Analysis/Disjoint/ReachSet.java | 539 ++ Robust/src/Analysis/Disjoint/ReachState.java | 442 ++ Robust/src/Analysis/Disjoint/ReachTuple.java | 144 + Robust/src/Analysis/Disjoint/RefEdge.java | 274 + Robust/src/Analysis/Disjoint/RefSrcNode.java | 57 + .../src/Analysis/Disjoint/VariableNode.java | 43 + Robust/src/IR/State.java | 9 + Robust/src/Main/Main.java | 37 + Robust/src/Makefile | 9 +- Robust/src/Tests/disjoint/simple/makefile | 27 + Robust/src/Tests/disjoint/simple/test.java | 8 + 19 files changed, 8262 insertions(+), 3 deletions(-) create mode 100644 Robust/src/Analysis/Disjoint/AllocSite.java create mode 100644 Robust/src/Analysis/Disjoint/Canonical.java create mode 100644 Robust/src/Analysis/Disjoint/CanonicalWrapper.java create mode 100644 Robust/src/Analysis/Disjoint/ChangeSet.java create mode 100644 Robust/src/Analysis/Disjoint/ChangeTuple.java create mode 100644 Robust/src/Analysis/Disjoint/DisjointAnalysis.java create mode 100644 Robust/src/Analysis/Disjoint/HeapRegionNode.java create mode 100644 Robust/src/Analysis/Disjoint/ReachGraph.java create mode 100644 Robust/src/Analysis/Disjoint/ReachSet.java create mode 100644 Robust/src/Analysis/Disjoint/ReachState.java create mode 100644 Robust/src/Analysis/Disjoint/ReachTuple.java create mode 100644 Robust/src/Analysis/Disjoint/RefEdge.java create mode 100644 
Robust/src/Analysis/Disjoint/RefSrcNode.java create mode 100644 Robust/src/Analysis/Disjoint/VariableNode.java create mode 100644 Robust/src/Tests/disjoint/simple/makefile create mode 100644 Robust/src/Tests/disjoint/simple/test.java diff --git a/Robust/src/Analysis/Disjoint/AllocSite.java b/Robust/src/Analysis/Disjoint/AllocSite.java new file mode 100644 index 00000000..22bcb8bb --- /dev/null +++ b/Robust/src/Analysis/Disjoint/AllocSite.java @@ -0,0 +1,209 @@ +package Analysis.DisjointAnalysis; + +import IR.*; +import IR.Flat.*; +import java.util.*; + +// allocation sites are independent of any particular +// ownership graph, unlike most of the other elements +// of the ownership analysis. An allocation site is +// simply a collection of heap region identifiers that +// are associated with a single allocation site in the +// program under analysis. + +// So two different ownership graphs may incorporate +// nodes that represent the memory from one allocation +// site. In this case there are two different sets of +// HeapRegionNode objects, but they have the same +// node identifiers, and there is one AllocSite +// object associated with the FlatNew node that gives +// the graphs the identifiers in question. 
+ +public class AllocSite { + + static private int uniqueIDcount = 0; + + protected Integer id; + protected int allocationDepth; + protected Vector ithOldest; + protected Integer summary; + protected FlatNew flatNew; + protected String disjointId; + + public static final int AGE_notInThisSite = 100; + public static final int AGE_in_I = 101; + public static final int AGE_oldest = 102; + public static final int AGE_summary = 103; + + public static final int SHADOWAGE_notInThisSite = -100; + public static final int SHADOWAGE_in_I = -101; + public static final int SHADOWAGE_oldest = -102; + public static final int SHADOWAGE_summary = -103; + + private boolean flag=false; + + + public AllocSite(int allocationDepth, FlatNew flatNew, String disjointId) { + assert allocationDepth >= 1; + + this.allocationDepth = allocationDepth; + this.flatNew = flatNew; + this.disjointId = disjointId; + + ithOldest = new Vector(allocationDepth); + id = generateUniqueAllocSiteID(); + } + + static public Integer generateUniqueAllocSiteID() { + ++uniqueIDcount; + return new Integer(uniqueIDcount); + } + + + public String getDisjointAnalysisId() { + return disjointId; + } + + + public int getAllocationDepth() { + return allocationDepth; + } + + public void setIthOldest(int i, Integer id) { + assert i >= 0; + assert i < allocationDepth; + assert id != null; + + ithOldest.add(i, id); + } + + public Integer getIthOldest(int i) { + assert i >= 0; + assert i < allocationDepth; + + return ithOldest.get(i); + } + + public Integer getIthOldestShadow(int i) { + assert i >= 0; + assert i < allocationDepth; + + return -ithOldest.get(i); + } + + public Integer getOldest() { + return ithOldest.get(allocationDepth - 1); + } + + public Integer getOldestShadow() { + return -ithOldest.get(allocationDepth - 1); + } + + public void setSummary(Integer id) { + assert id != null; + summary = id; + } + + public Integer getSummary() { + return summary; + } + + public Integer getSummaryShadow() { + return -summary; + 
} + + public FlatNew getFlatNew() { + return flatNew; + } + + public TypeDescriptor getType() { + return flatNew.getType(); + } + + public int getAgeCategory(Integer id) { + + if( id.equals(summary) ) { + return AGE_summary; + } + + if( id.equals(getOldest() ) ) { + return AGE_oldest; + } + + for( int i = 0; i < allocationDepth - 1; ++i ) { + if( id.equals(ithOldest.get(i) ) ) { + return AGE_in_I; + } + } + + return AGE_notInThisSite; + } + + public Integer getAge(Integer id) { + for( int i = 0; i < allocationDepth - 1; ++i ) { + if( id.equals(ithOldest.get(i) ) ) { + return new Integer(i); + } + } + + return null; + } + + public int getShadowAgeCategory(Integer id) { + if( id.equals(-summary) ) { + return SHADOWAGE_summary; + } + + if( id.equals(getOldestShadow() ) ) { + return SHADOWAGE_oldest; + } + + for( int i = 0; i < allocationDepth - 1; ++i ) { + if( id.equals(getIthOldestShadow(i) ) ) { + return SHADOWAGE_in_I; + } + } + + return SHADOWAGE_notInThisSite; + } + + public Integer getShadowAge(Integer id) { + for( int i = 0; i < allocationDepth - 1; ++i ) { + if( id.equals(getIthOldestShadow(i) ) ) { + return new Integer(-i); + } + } + + return null; + } + + public String toString() { + if( disjointId == null ) { + return "allocSite" + id; + } + return "allocSite "+disjointId+" ("+id+")"; + } + + public String toStringVerbose() { + if( disjointId == null ) { + return "allocSite" + id + " "+flatNew.getType().toPrettyString(); + } + return "allocSite "+disjointId+" ("+id+") "+flatNew.getType().toPrettyString(); + } + + public String toStringForDOT() { + if( disjointId != null ) { + return "disjoint "+disjointId+"\\n"+toString()+"\\n"+getType().toPrettyString(); + } else { + return toString()+"\\n"+getType().toPrettyString(); + } + } + + public void setFlag(boolean flag){ + this.flag=flag; + } + + public boolean getFlag(){ + return flag; + } +} diff --git a/Robust/src/Analysis/Disjoint/Canonical.java b/Robust/src/Analysis/Disjoint/Canonical.java new file mode 
100644 index 00000000..981bb657 --- /dev/null +++ b/Robust/src/Analysis/Disjoint/Canonical.java @@ -0,0 +1,27 @@ +package Analysis.DisjointAnalysis; + +import IR.*; +import IR.Flat.*; +import java.util.*; +import java.io.*; + +public class Canonical { + + private static Hashtable canon = new Hashtable(); + + int canonicalvalue; + private static int canonicalcount = 1; + + public static Canonical makeCanonical( Canonical c ) { + if( canon.containsKey(c) ) { + return canon.get(c); + } + c.canonicalvalue=canonicalcount++; + canon.put(c, c); + return c; + } + + static Hashtable unionhash=new Hashtable(); + static Hashtable interhash=new Hashtable(); + static Hashtable lookuphash=new Hashtable(); +} diff --git a/Robust/src/Analysis/Disjoint/CanonicalWrapper.java b/Robust/src/Analysis/Disjoint/CanonicalWrapper.java new file mode 100644 index 00000000..4f8f4075 --- /dev/null +++ b/Robust/src/Analysis/Disjoint/CanonicalWrapper.java @@ -0,0 +1,18 @@ +package Analysis.DisjointAnalysis; + +public class CanonicalWrapper { + Canonical a; + public Canonical b; + + public CanonicalWrapper(Canonical a) { + assert a.canonicalvalue!=0; + this.a=a; + } + public int hashCode() { + return a.canonicalvalue; + } + public boolean equals(Object o) { + CanonicalWrapper ro=(CanonicalWrapper)o; + return ro.a.canonicalvalue==a.canonicalvalue; + } +} \ No newline at end of file diff --git a/Robust/src/Analysis/Disjoint/ChangeSet.java b/Robust/src/Analysis/Disjoint/ChangeSet.java new file mode 100644 index 00000000..4f343a0e --- /dev/null +++ b/Robust/src/Analysis/Disjoint/ChangeSet.java @@ -0,0 +1,109 @@ +package Analysis.DisjointAnalysis; + +import IR.*; +import IR.Flat.*; +import java.util.*; +import java.io.*; + + +public class ChangeSet extends Canonical { + + private HashSet changeTuples; + + public ChangeSet() { + changeTuples = new HashSet(); + } + + public ChangeSet(ChangeTuple ct) { + this(); + changeTuples.add(ct); + } + + public ChangeSet(ChangeSet cts) { + changeTuples = 
(HashSet)cts.changeTuples.clone(); + } + + public ChangeSet makeCanonical() { + return (ChangeSet) Canonical.makeCanonical(this); + } + + public Iterator iterator() { + return changeTuples.iterator(); + } + + public int size() { + return changeTuples.size(); + } + + public ChangeSet union(ChangeSet ctsIn) { + assert ctsIn != null; + + ChangeSet ctsOut = new ChangeSet(this); + ctsOut.changeTuples.addAll(ctsIn.changeTuples); + return ctsOut.makeCanonical(); + } + + public ChangeSet union(ChangeTuple ctIn) { + assert ctIn != null; + + ChangeSet ctsOut = new ChangeSet(this); + ctsOut.changeTuples.add(ctIn); + return ctsOut.makeCanonical(); + } + + public boolean isEmpty() { + return changeTuples.isEmpty(); + } + + public boolean isSubset(ChangeSet ctsIn) { + assert ctsIn != null; + return ctsIn.changeTuples.containsAll(this.changeTuples); + } + + + public boolean equals(Object o) { + if( o == null ) { + return false; + } + + if( !(o instanceof ChangeSet) ) { + return false; + } + + ChangeSet cts = (ChangeSet) o; + return changeTuples.equals(cts.changeTuples); + } + + private boolean oldHashSet = false; + private int oldHash = 0; + public int hashCode() { + int currentHash = changeTuples.hashCode(); + + if( oldHashSet == false ) { + oldHash = currentHash; + oldHashSet = true; + } else { + if( oldHash != currentHash ) { + System.out.println("IF YOU SEE THIS A CANONICAL ChangeSet CHANGED"); + Integer x = null; + x.toString(); + } + } + + return currentHash; + } + + + public String toString() { + String s = "["; + + Iterator i = this.iterator(); + while( i.hasNext() ) { + s += "\n "+i.next(); + } + + s += "\n]"; + + return s; + } +} diff --git a/Robust/src/Analysis/Disjoint/ChangeTuple.java b/Robust/src/Analysis/Disjoint/ChangeTuple.java new file mode 100644 index 00000000..b4ddfd37 --- /dev/null +++ b/Robust/src/Analysis/Disjoint/ChangeTuple.java @@ -0,0 +1,76 @@ +package Analysis.DisjointAnalysis; + +import IR.*; +import IR.Flat.*; +import java.util.*; +import java.io.*; 
+ + +// a change touple is a pair that indicates if the +// first ReachState is found in a ReachSet, +// then the second ReachState should be added + +// THIS CLASS IS IMMUTABLE! + +public class ChangeTuple extends Canonical +{ + private ReachState toMatch; + private ReachState toAdd; + + public ChangeTuple(ReachState toMatch, + ReachState toAdd) { + this.toMatch = toMatch; + this.toAdd = toAdd; + } + + public ChangeTuple makeCanonical() { + return (ChangeTuple) Canonical.makeCanonical(this); + } + + public ReachState getSetToMatch() { + return toMatch; + } + public ReachState getSetToAdd() { + return toAdd; + } + + + public boolean equals(Object o) { + if( o == null ) { + return false; + } + + if( !(o instanceof ChangeTuple) ) { + return false; + } + + ChangeTuple ct = (ChangeTuple) o; + + return toMatch.equals(ct.getSetToMatch() ) && + toAdd.equals(ct.getSetToAdd() ); + } + + private boolean oldHashSet = false; + private int oldHash = 0; + public int hashCode() { + int currentHash = toMatch.hashCode() + toAdd.hashCode()*3; + + if( oldHashSet == false ) { + oldHash = currentHash; + oldHashSet = true; + } else { + if( oldHash != currentHash ) { + System.out.println("IF YOU SEE THIS A CANONICAL ChangeTuple CHANGED"); + Integer x = null; + x.toString(); + } + } + + return currentHash; + } + + + public String toString() { + return new String("("+toMatch+" -> "+toAdd+")"); + } +} diff --git a/Robust/src/Analysis/Disjoint/DisjointAnalysis.java b/Robust/src/Analysis/Disjoint/DisjointAnalysis.java new file mode 100644 index 00000000..e7e53e34 --- /dev/null +++ b/Robust/src/Analysis/Disjoint/DisjointAnalysis.java @@ -0,0 +1,1077 @@ +package Analysis.Disjoint; + +import Analysis.CallGraph.*; +import Analysis.Liveness; +import Analysis.ArrayReferencees; +import IR.*; +import IR.Flat.*; +import IR.Tree.Modifiers; +import java.util.*; +import java.io.*; + + +public class DisjointAnalysis { + + + // data from the compiler + public State state; + public CallGraph callGraph; + 
public Liveness liveness; + public ArrayReferencees arrayReferencees; + public TypeUtil typeUtil; + public int allocationDepth; + + + // used to identify HeapRegionNode objects + // A unique ID equates an object in one + // ownership graph with an object in another + // graph that logically represents the same + // heap region + // start at 10 and increment to reserve some + // IDs for special purposes + static private int uniqueIDcount = 10; + + + // An out-of-scope method created by the + // analysis that has no parameters, and + // appears to allocate the command line + // arguments, then invoke the source code's + // main method. The purpose of this is to + // provide the analysis with an explicit + // top-level context with no parameters + private MethodDescriptor mdAnalysisEntry; + private FlatMethod fmAnalysisEntry; + + // main method defined by source program + private MethodDescriptor mdSourceEntry; + + // the set of task and/or method descriptors + // reachable in call graph + private Set descriptorsToAnalyze; + + + // for controlling DOT file output + private boolean writeFinalDOTs; + private boolean writeAllIncrementalDOTs; + + + // this analysis generates a disjoint reachability + // graph for every reachable method in the program + public DisjointAnalysis( State s, + TypeUtil tu, + CallGraph cg, + Liveness l, + ArrayReferencees ar + ) throws java.io.IOException { + init( s, tu, cg, l, ar ); + } + + private void init( State state, + TypeUtil typeUtil, + CallGraph callGraph, + Liveness liveness, + ArrayReferencees arrayReferencees + ) throws java.io.IOException { + + this.state = state; + this.typeUtil = typeUtil; + this.callGraph = callGraph; + this.liveness = liveness; + this.arrayReferencees = arrayReferencees; + this.allocationDepth = state.DISJOINTALLOCDEPTH; + this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL; + this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL; + + + // set some static 
configuration for ReachGraphs + ReachGraph.allocationDepth = allocationDepth; + ReachGraph.typeUtil = typeUtil; + + + // This analysis does not support Bamboo at the moment, + // but if it does in the future we would initialize the + // set of descriptors to analyze as the program-reachable + // tasks and the methods callable by them. For Java, + // just methods reachable from the main method. + assert !state.TASK; + descriptorsToAnalyze = new HashSet(); + + + double timeStartAnalysis = (double) System.nanoTime(); + + // start interprocedural fixed-point computation + analyzeMethods(); + + double timeEndAnalysis = (double) System.nanoTime(); + double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow( 10.0, 9.0 ) ); + String treport = String.format( "The reachability analysis took %.3f sec.", dt ); + String justtime = String.format( "%.2f", dt ); + System.out.println( treport ); + + if( writeFinalDOTs && !writeAllIncrementalDOTs ) { + //writeFinalContextGraphs(); + } + + if( state.DISJOINTALIASFILE != null ) { + if( state.TASK ) { + // not supporting tasks yet... 
+ } else { + /* + writeAllAliasesJava( aliasFile, + treport, + justtime, + state.DISJOINTALIASTAB, + state.lines ); + */ + } + } + } + + + + // fixed-point computation over the call graph--when a + // method's callees are updated, it must be reanalyzed + private void analyzeMethods() throws java.io.IOException { + + mdSourceEntry = typeUtil.getMain(); + + // add all methods transitively reachable from the + // source's main to set for analysis + descriptorsToAnalyze.add( mdSourceEntry ); + descriptorsToAnalyze.addAll( + callGraph.getAllMethods( mdSourceEntry ) + ); + + // fabricate an empty calling context that will call + // the source's main, but call graph doesn't know + // about it, so explicitly add it + makeAnalysisEntryMethod( mdSourceEntry ); + descriptorsToAnalyze.add( mdAnalysisEntry ); + + + // topologically sort according to the call graph so + // leaf calls are ordered first, smarter analysis order + LinkedList sortedDescriptors = + topologicalSort( descriptorsToAnalyze ); + + + System.out.println( "topological:\n"+sortedDescriptors ); + + + /* + methodContextsToVisitQ = new PriorityQueue(); + methodContextsToVisitSet = new HashSet(); + + int p = 0; + Iterator mcItr = sortedMethodContexts.iterator(); + while( mcItr.hasNext() ) { + MethodContext mc = mcItr.next(); + mapDescriptorToPriority.put( mc.getDescriptor(), new Integer( p ) ); + methodContextsToVisitQ.add( new MethodContextQWrapper( p, mc ) ); + methodContextsToVisitSet.add( mc ); + ++p; + } + + // analyze methods from the priority queue until it is empty + while( !methodContextsToVisitQ.isEmpty() ) { + MethodContext mc = methodContextsToVisitQ.poll().getMethodContext(); + assert methodContextsToVisitSet.contains( mc ); + methodContextsToVisitSet.remove( mc ); + + // because the task or method descriptor just extracted + // was in the "to visit" set it either hasn't been analyzed + // yet, or some method that it depends on has been + // updated. 
Recompute a complete ownership graph for + // this task/method and compare it to any previous result. + // If there is a change detected, add any methods/tasks + // that depend on this one to the "to visit" set. + + System.out.println("Analyzing " + mc); + + Descriptor d = mc.getDescriptor(); + FlatMethod fm; + if( d instanceof MethodDescriptor ) { + fm = state.getMethodFlat( (MethodDescriptor) d); + } else { + assert d instanceof TaskDescriptor; + fm = state.getMethodFlat( (TaskDescriptor) d); + } + + ReachGraph og = analyzeFlatMethod(mc, fm); + ReachGraph ogPrev = mapMethodContextToCompleteReachabilityGraph.get(mc); + if( !og.equals(ogPrev) ) { + setGraphForMethodContext(mc, og); + + Iterator depsItr = iteratorDependents( mc ); + while( depsItr.hasNext() ) { + MethodContext mcNext = depsItr.next(); + + if( !methodContextsToVisitSet.contains( mcNext ) ) { + methodContextsToVisitQ.add( new MethodContextQWrapper( mapDescriptorToPriority.get( mcNext.getDescriptor() ), + mcNext ) ); + methodContextsToVisitSet.add( mcNext ); + } + } + } + } + */ + } + + /* + // keep passing the Descriptor of the method along for debugging + // and dot file writing + private ReachGraph + analyzeFlatMethod(MethodContext mc, + FlatMethod flatm) throws java.io.IOException { + + // initialize flat nodes to visit as the flat method + // because it is the entry point + + flatNodesToVisit = new HashSet(); + flatNodesToVisit.add(flatm); + + // initilize the mapping of flat nodes in this flat method to + // ownership graph results to an empty mapping + mapFlatNodeToReachabilityGraph = new Hashtable(); + + // initialize the set of return nodes that will be combined as + // the final ownership graph result to return as an empty set + returnNodesToCombineForCompleteReachabilityGraph = new HashSet(); + + + while( !flatNodesToVisit.isEmpty() ) { + FlatNode fn = (FlatNode) flatNodesToVisit.iterator().next(); + flatNodesToVisit.remove(fn); + + //System.out.println( " "+fn ); + + // perform this node's 
contributions to the ownership + // graph on a new copy, then compare it to the old graph + // at this node to see if anything was updated. + ReachGraph og = new ReachGraph(); + + // start by merging all node's parents' graphs + for( int i = 0; i < fn.numPrev(); ++i ) { + FlatNode pn = fn.getPrev(i); + if( mapFlatNodeToReachabilityGraph.containsKey(pn) ) { + ReachabilityGraph ogParent = mapFlatNodeToReachabilityGraph.get(pn); + og.merge(ogParent); + } + } + + // apply the analysis of the flat node to the + // ownership graph made from the merge of the + // parent graphs + og = analyzeFlatNode(mc, + flatm, + fn, + returnNodesToCombineForCompleteReachabilityGraph, + og); + + + + + if( takeDebugSnapshots && + mc.getDescriptor().getSymbol().equals( mcDescSymbolDebug ) ) { + debugSnapshot(og,fn); + } + + + // if the results of the new graph are different from + // the current graph at this node, replace the graph + // with the update and enqueue the children for + // processing + ReachGraph ogPrev = mapFlatNodeToReachabilityGraph.get(fn); + if( !og.equals(ogPrev) ) { + mapFlatNodeToReachabilityGraph.put(fn, og); + + for( int i = 0; i < fn.numNext(); i++ ) { + FlatNode nn = fn.getNext(i); + flatNodesToVisit.add(nn); + } + } + } + + // end by merging all return nodes into a complete + // ownership graph that represents all possible heap + // states after the flat method returns + ReachGraph completeGraph = new ReachGraph(); + Iterator retItr = returnNodesToCombineForCompleteReachabilityGraph.iterator(); + while( retItr.hasNext() ) { + FlatReturnNode frn = (FlatReturnNode) retItr.next(); + assert mapFlatNodeToReachabilityGraph.containsKey(frn); + ReachGraph ogr = mapFlatNodeToReachabilityGraph.get(frn); + completeGraph.merge(ogr); + } + + return completeGraph; + } + + + private ReachGraph + analyzeFlatNode(MethodContext mc, + FlatMethod fmContaining, + FlatNode fn, + HashSet setRetNodes, + ReachGraph og) throws java.io.IOException { + + + // any variables that are no 
longer live should be + // nullified in the graph to reduce edges + // NOTE: it is not clear we need this. It costs a + // liveness calculation for every method, so only + // turn it on if we find we actually need it. + //og.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) ); + + + TempDescriptor lhs; + TempDescriptor rhs; + FieldDescriptor fld; + + // use node type to decide what alterations to make + // to the ownership graph + switch( fn.kind() ) { + + case FKind.FlatMethod: + FlatMethod fm = (FlatMethod) fn; + + // there should only be one FlatMethod node as the + // parent of all other FlatNode objects, so take + // the opportunity to construct the initial graph by + // adding parameters labels to new heap regions + // AND this should be done once globally so that the + // parameter IDs are consistent between analysis + // iterations, so if this step has been done already + // just merge in the cached version + ReachGraph ogInitParamAlloc = mapMethodContextToInitialParamAllocGraph.get(mc); + if( ogInitParamAlloc == null ) { + + // if the method context has aliased parameters, make sure + // there is a blob region for all those param to reference + Set aliasedParamIndices = mc.getAliasedParamIndices(); + + if( !aliasedParamIndices.isEmpty() ) { + og.makeAliasedParamHeapRegionNode(fm); + } + + // set up each parameter + for( int i = 0; i < fm.numParameters(); ++i ) { + TempDescriptor tdParam = fm.getParameter( i ); + TypeDescriptor typeParam = tdParam.getType(); + Integer paramIndex = new Integer( i ); + + if( typeParam.isImmutable() && !typeParam.isArray() ) { + // don't bother with this primitive parameter, it + // cannot affect reachability + continue; + } + + if( aliasedParamIndices.contains( paramIndex ) ) { + // use the alias blob but give parameters their + // own primary obj region + og.assignTempEqualToAliasedParam( tdParam, + paramIndex, fm ); + } else { + // this parameter is not aliased to others, give it + // a fresh primary obj and 
secondary object + og.assignTempEqualToParamAlloc( tdParam, + mc.getDescriptor() instanceof TaskDescriptor, + paramIndex, fm ); + } + } + + // add additional edges for aliased regions if necessary + if( !aliasedParamIndices.isEmpty() ) { + og.addParam2ParamAliasEdges( fm, aliasedParamIndices ); + } + + // clean up reachability on initial parameter shapes + og.globalSweep(); + + // this maps tokens to parameter indices and vice versa + // for when this method is a callee + og.prepareParamTokenMaps( fm ); + + // cache the graph + ReachGraph ogResult = new ReachGraph(); + ogResult.merge(og); + mapMethodContextToInitialParamAllocGraph.put(mc, ogResult); + + } else { + // or just leverage the cached copy + og.merge(ogInitParamAlloc); + } + break; + + case FKind.FlatOpNode: + FlatOpNode fon = (FlatOpNode) fn; + if( fon.getOp().getOp() == Operation.ASSIGN ) { + lhs = fon.getDest(); + rhs = fon.getLeft(); + og.assignTempXEqualToTempY(lhs, rhs); + } + break; + + case FKind.FlatCastNode: + FlatCastNode fcn = (FlatCastNode) fn; + lhs = fcn.getDst(); + rhs = fcn.getSrc(); + + TypeDescriptor td = fcn.getType(); + assert td != null; + + og.assignTempXEqualToCastedTempY(lhs, rhs, td); + break; + + case FKind.FlatFieldNode: + FlatFieldNode ffn = (FlatFieldNode) fn; + lhs = ffn.getDst(); + rhs = ffn.getSrc(); + fld = ffn.getField(); + if( !fld.getType().isImmutable() || fld.getType().isArray() ) { + og.assignTempXEqualToTempYFieldF(lhs, rhs, fld); + } + + meAnalysis.analyzeFlatFieldNode(mc, og, rhs, fld); + + break; + + case FKind.FlatSetFieldNode: + FlatSetFieldNode fsfn = (FlatSetFieldNode) fn; + lhs = fsfn.getDst(); + fld = fsfn.getField(); + rhs = fsfn.getSrc(); + if( !fld.getType().isImmutable() || fld.getType().isArray() ) { + og.assignTempXFieldFEqualToTempY(lhs, fld, rhs); + } + + meAnalysis.analyzeFlatSetFieldNode(mc, og, lhs, fld); + + break; + + case FKind.FlatElementNode: + FlatElementNode fen = (FlatElementNode) fn; + lhs = fen.getDst(); + rhs = fen.getSrc(); + if( 
!lhs.getType().isImmutable() || lhs.getType().isArray() ) { + + assert rhs.getType() != null; + assert rhs.getType().isArray(); + + TypeDescriptor tdElement = rhs.getType().dereference(); + FieldDescriptor fdElement = getArrayField( tdElement ); + + og.assignTempXEqualToTempYFieldF(lhs, rhs, fdElement); + } + break; + + case FKind.FlatSetElementNode: + FlatSetElementNode fsen = (FlatSetElementNode) fn; + + if( arrayReferencees.doesNotCreateNewReaching( fsen ) ) { + // skip this node if it cannot create new reachability paths + break; + } + + lhs = fsen.getDst(); + rhs = fsen.getSrc(); + if( !rhs.getType().isImmutable() || rhs.getType().isArray() ) { + + assert lhs.getType() != null; + assert lhs.getType().isArray(); + + TypeDescriptor tdElement = lhs.getType().dereference(); + FieldDescriptor fdElement = getArrayField( tdElement ); + + og.assignTempXFieldFEqualToTempY(lhs, fdElement, rhs); + } + break; + + case FKind.FlatNew: + FlatNew fnn = (FlatNew) fn; + lhs = fnn.getDst(); + if( !lhs.getType().isImmutable() || lhs.getType().isArray() ) { + AllocSite as = getAllocSiteFromFlatNewPRIVATE(fnn); + + if (mapMethodContextToLiveInAllocSiteSet != null){ + HashSet alllocSet=mapMethodContextToLiveInAllocSiteSet.get(mc); + if(alllocSet!=null){ + for (Iterator iterator = alllocSet.iterator(); iterator + .hasNext();) { + AllocSite allocationSite = (AllocSite) iterator + .next(); + if(allocationSite.flatNew.equals(as.flatNew)){ + as.setFlag(true); + } + } + } + } + + og.assignTempEqualToNewAlloc(lhs, as); + } + break; + + case FKind.FlatCall: + FlatCall fc = (FlatCall) fn; + MethodDescriptor md = fc.getMethod(); + FlatMethod flatm = state.getMethodFlat(md); + ReachGraph ogMergeOfAllPossibleCalleeResults = new ReachGraph(); + + if( md.isStatic() ) { + // a static method is simply always the same, makes life easy + ogMergeOfAllPossibleCalleeResults = og; + + Set aliasedParamIndices = + ogMergeOfAllPossibleCalleeResults.calculateAliasedParamSet(fc, md.isStatic(), flatm); + + 
MethodContext mcNew = new MethodContext( md, aliasedParamIndices ); + Set contexts = mapDescriptorToAllMethodContexts.get( md ); + assert contexts != null; + contexts.add( mcNew ); + + addDependent( mc, mcNew ); + + ReachGraph onlyPossibleCallee = mapMethodContextToCompleteReachabilityGraph.get( mcNew ); + + if( onlyPossibleCallee == null ) { + // if this method context has never been analyzed just schedule it for analysis + // and skip over this call site for now + if( !methodContextsToVisitSet.contains( mcNew ) ) { + methodContextsToVisitQ.add( new MethodContextQWrapper( mapDescriptorToPriority.get( md ), + mcNew ) ); + methodContextsToVisitSet.add( mcNew ); + } + + } else { + ogMergeOfAllPossibleCalleeResults.resolveMethodCall(fc, md.isStatic(), flatm, onlyPossibleCallee, mc, null); + } + + meAnalysis.createNewMapping(mcNew); + meAnalysis.analyzeFlatCall(ogMergeOfAllPossibleCalleeResults, mcNew, mc, fc); + + + } else { + // if the method descriptor is virtual, then there could be a + // set of possible methods that will actually be invoked, so + // find all of them and merge all of their results together + TypeDescriptor typeDesc = fc.getThis().getType(); + Set possibleCallees = callGraph.getMethods(md, typeDesc); + + Iterator i = possibleCallees.iterator(); + while( i.hasNext() ) { + MethodDescriptor possibleMd = (MethodDescriptor) i.next(); + FlatMethod pflatm = state.getMethodFlat(possibleMd); + + // don't alter the working graph (og) until we compute a result for every + // possible callee, merge them all together, then set og to that + ReachGraph ogCopy = new ReachGraph(); + ogCopy.merge(og); + + Set aliasedParamIndices = + ogCopy.calculateAliasedParamSet(fc, possibleMd.isStatic(), pflatm); + + MethodContext mcNew = new MethodContext( possibleMd, aliasedParamIndices ); + Set contexts = mapDescriptorToAllMethodContexts.get( md ); + assert contexts != null; + contexts.add( mcNew ); + + + meAnalysis.createNewMapping(mcNew); + + + addDependent( mc, mcNew ); + + 
ReachGraph ogPotentialCallee = mapMethodContextToCompleteReachabilityGraph.get( mcNew ); + + if( ogPotentialCallee == null ) { + // if this method context has never been analyzed just schedule it for analysis + // and skip over this call site for now + if( !methodContextsToVisitSet.contains( mcNew ) ) { + methodContextsToVisitQ.add( new MethodContextQWrapper( mapDescriptorToPriority.get( md ), + mcNew ) ); + methodContextsToVisitSet.add( mcNew ); + } + + } else { + ogCopy.resolveMethodCall(fc, possibleMd.isStatic(), pflatm, ogPotentialCallee, mc, null); + } + + ogMergeOfAllPossibleCalleeResults.merge(ogCopy); + + meAnalysis.analyzeFlatCall(ogMergeOfAllPossibleCalleeResults, mcNew, mc, fc); + } + + } + + og = ogMergeOfAllPossibleCalleeResults; + break; + + case FKind.FlatReturnNode: + FlatReturnNode frn = (FlatReturnNode) fn; + rhs = frn.getReturnTemp(); + if( rhs != null && !rhs.getType().isImmutable() ) { + og.assignReturnEqualToTemp(rhs); + } + setRetNodes.add(frn); + break; + } + + + if( methodEffects ) { + Hashtable table=mapMethodContextToFlatNodeReachabilityGraph.get(mc); + if(table==null){ + table=new Hashtable(); + } + table.put(fn, og); + mapMethodContextToFlatNodeReachabilityGraph.put(mc, table); + } + + return og; + } + + + // this method should generate integers strictly greater than zero! 
+ // special "shadow" regions are made from a heap region by negating + // the ID + static public Integer generateUniqueHeapRegionNodeID() { + ++uniqueIDcount; + return new Integer(uniqueIDcount); + } + + + static public FieldDescriptor getArrayField( TypeDescriptor tdElement ) { + FieldDescriptor fdElement = mapTypeToArrayField.get( tdElement ); + if( fdElement == null ) { + fdElement = new FieldDescriptor(new Modifiers(Modifiers.PUBLIC), + tdElement, + arrayElementFieldName, + null, + false); + mapTypeToArrayField.put( tdElement, fdElement ); + } + return fdElement; + } + + + private void setGraphForMethodContext(MethodContext mc, ReachabilityGraph og) { + + mapMethodContextToCompleteReachabilityGraph.put(mc, og); + + if( writeFinalDOTs && writeAllIncrementalDOTs ) { + if( !mapMethodContextToNumUpdates.containsKey(mc) ) { + mapMethodContextToNumUpdates.put(mc, new Integer(0) ); + } + Integer n = mapMethodContextToNumUpdates.get(mc); + try { + og.writeGraph(mc+"COMPLETE"+String.format("%05d", n), + true, // write labels (variables) + true, // selectively hide intermediate temp vars + true, // prune unreachable heap regions + false, // show back edges to confirm graph validity + false, // show parameter indices (unmaintained!) 
+ true, // hide subset reachability states + true); // hide edge taints + } catch( IOException e ) {} + mapMethodContextToNumUpdates.put(mc, n + 1); + } + } + + + private void addDependent( MethodContext caller, MethodContext callee ) { + HashSet deps = mapMethodContextToDependentContexts.get( callee ); + if( deps == null ) { + deps = new HashSet(); + } + deps.add( caller ); + mapMethodContextToDependentContexts.put( callee, deps ); + } + + private Iterator iteratorDependents( MethodContext callee ) { + HashSet deps = mapMethodContextToDependentContexts.get( callee ); + if( deps == null ) { + deps = new HashSet(); + mapMethodContextToDependentContexts.put( callee, deps ); + } + return deps.iterator(); + } + + + private void writeFinalContextGraphs() { + Set entrySet = mapMethodContextToCompleteReachabilityGraph.entrySet(); + Iterator itr = entrySet.iterator(); + while( itr.hasNext() ) { + Map.Entry me = (Map.Entry) itr.next(); + MethodContext mc = (MethodContext) me.getKey(); + ReachabilityGraph og = (ReachabilityGraph) me.getValue(); + + try { + og.writeGraph(mc+"COMPLETE", + true, // write labels (variables) + true, // selectively hide intermediate temp vars + true, // prune unreachable heap regions + false, // show back edges to confirm graph validity + false, // show parameter indices (unmaintained!) 
+ true, // hide subset reachability states + true); // hide edge taints + } catch( IOException e ) {} + } + } + + + + // return just the allocation site associated with one FlatNew node + private AllocSite getAllocSiteFromFlatNewPRIVATE(FlatNew fn) { + + if( !mapFlatNewToAllocSite.containsKey(fn) ) { + AllocSite as = new AllocSite(allocationDepth, fn, fn.getDisjointAnalysisId()); + + // the newest nodes are single objects + for( int i = 0; i < allocationDepth; ++i ) { + Integer id = generateUniqueHeapRegionNodeID(); + as.setIthOldest(i, id); + mapHrnIdToAllocSite.put( id, as ); + } + + // the oldest node is a summary node + Integer idSummary = generateUniqueHeapRegionNodeID(); + as.setSummary(idSummary); + + mapFlatNewToAllocSite.put(fn, as); + } + + return mapFlatNewToAllocSite.get(fn); + } + + + // return all allocation sites in the method (there is one allocation + // site per FlatNew node in a method) + private HashSet getAllocSiteSet(Descriptor d) { + if( !mapDescriptorToAllocSiteSet.containsKey(d) ) { + buildAllocSiteSet(d); + } + + return mapDescriptorToAllocSiteSet.get(d); + + } + + private void buildAllocSiteSet(Descriptor d) { + HashSet s = new HashSet(); + + FlatMethod fm = state.getMethodFlat( d ); + + // visit every node in this FlatMethod's IR graph + // and make a set of the allocation sites from the + // FlatNew node's visited + HashSet visited = new HashSet(); + HashSet toVisit = new HashSet(); + toVisit.add( fm ); + + while( !toVisit.isEmpty() ) { + FlatNode n = toVisit.iterator().next(); + + if( n instanceof FlatNew ) { + s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) ); + } + + toVisit.remove( n ); + visited.add( n ); + + for( int i = 0; i < n.numNext(); ++i ) { + FlatNode child = n.getNext( i ); + if( !visited.contains( child ) ) { + toVisit.add( child ); + } + } + } + + mapDescriptorToAllocSiteSet.put( d, s ); + } + + + private HashSet getFlaggedAllocSites(Descriptor dIn) { + + HashSet out = new HashSet(); + HashSet toVisit = new 
HashSet(); + HashSet visited = new HashSet(); + + toVisit.add(dIn); + + while( !toVisit.isEmpty() ) { + Descriptor d = toVisit.iterator().next(); + toVisit.remove(d); + visited.add(d); + + HashSet asSet = getAllocSiteSet(d); + Iterator asItr = asSet.iterator(); + while( asItr.hasNext() ) { + AllocSite as = (AllocSite) asItr.next(); + if( as.getDisjointAnalysisId() != null ) { + out.add(as); + } + } + + // enqueue callees of this method to be searched for + // allocation sites also + Set callees = callGraph.getCalleeSet(d); + if( callees != null ) { + Iterator methItr = callees.iterator(); + while( methItr.hasNext() ) { + MethodDescriptor md = (MethodDescriptor) methItr.next(); + + if( !visited.contains(md) ) { + toVisit.add(md); + } + } + } + } + + return out; + } + + + private HashSet + getFlaggedAllocSitesReachableFromTaskPRIVATE(TaskDescriptor td) { + + HashSet asSetTotal = new HashSet(); + HashSet toVisit = new HashSet(); + HashSet visited = new HashSet(); + + toVisit.add(td); + + // traverse this task and all methods reachable from this task + while( !toVisit.isEmpty() ) { + Descriptor d = toVisit.iterator().next(); + toVisit.remove(d); + visited.add(d); + + HashSet asSet = getAllocSiteSet(d); + Iterator asItr = asSet.iterator(); + while( asItr.hasNext() ) { + AllocSite as = (AllocSite) asItr.next(); + TypeDescriptor typed = as.getType(); + if( typed != null ) { + ClassDescriptor cd = typed.getClassDesc(); + if( cd != null && cd.hasFlags() ) { + asSetTotal.add(as); + } + } + } + + // enqueue callees of this method to be searched for + // allocation sites also + Set callees = callGraph.getCalleeSet(d); + if( callees != null ) { + Iterator methItr = callees.iterator(); + while( methItr.hasNext() ) { + MethodDescriptor md = (MethodDescriptor) methItr.next(); + + if( !visited.contains(md) ) { + toVisit.add(md); + } + } + } + } + + + return asSetTotal; + } + */ + + private LinkedList topologicalSort( Set toSort ) { + + Set discovered = new HashSet (); + LinkedList 
sorted = new LinkedList(); + + Iterator itr = toSort.iterator(); + while( itr.hasNext() ) { + Descriptor d = itr.next(); + + if( !discovered.contains( d ) ) { + dfsVisit( d, toSort, sorted, discovered ); + } + } + + return sorted; + } + + private void dfsVisit( Descriptor d, + Set toSort, + LinkedList sorted, + Set discovered ) { + discovered.add( d ); + + // only methods have callers, tasks never do + if( d instanceof MethodDescriptor ) { + + MethodDescriptor md = (MethodDescriptor) d; + + // the call graph is not aware that we have a fabricated + // analysis entry that calls the program source's entry + if( md == mdSourceEntry ) { + if( !discovered.contains( mdAnalysisEntry ) ) { + dfsVisit( mdAnalysisEntry, toSort, sorted, discovered ); + } + } + + // otherwise call graph guides DFS + Iterator itr = callGraph.getCallerSet( md ).iterator(); + while( itr.hasNext() ) { + Descriptor dCaller = (Descriptor) itr.next(); + + // only consider callers in the original set to analyze + if( !toSort.contains( dCaller ) ) { + continue; + } + + if( !discovered.contains( dCaller ) ) { + dfsVisit( dCaller, toSort, sorted, discovered ); + } + } + } + + sorted.addFirst( d ); + } + + + /* + private String computeAliasContextHistogram() { + + Hashtable mapNumContexts2NumDesc = + new Hashtable(); + + Iterator itr = mapDescriptorToAllMethodContexts.entrySet().iterator(); + while( itr.hasNext() ) { + Map.Entry me = (Map.Entry) itr.next(); + HashSet s = (HashSet) me.getValue(); + + Integer i = mapNumContexts2NumDesc.get( s.size() ); + if( i == null ) { + i = new Integer( 0 ); + } + mapNumContexts2NumDesc.put( s.size(), i + 1 ); + } + + String s = ""; + int total = 0; + + itr = mapNumContexts2NumDesc.entrySet().iterator(); + while( itr.hasNext() ) { + Map.Entry me = (Map.Entry) itr.next(); + Integer c0 = (Integer) me.getKey(); + Integer d0 = (Integer) me.getValue(); + total += d0; + s += String.format( "%4d methods had %4d unique alias contexts.\n", d0, c0 ); + } + + s += String.format( 
"\n%4d total methods analyzed.\n", total );
+
+ return s;
+ }
+
+ private int numMethodsAnalyzed() {
+ return descriptorsToAnalyze.size();
+ }
+ */
+
+
+ /*
+ // insert a call to debugSnapshot() somewhere in the analysis
+ // to get successive captures of the analysis state
+ boolean takeDebugSnapshots = false;
+ String mcDescSymbolDebug = "setRoute";
+ boolean stopAfterCapture = true;
+
+ // increments every visit to debugSnapshot, don't fiddle with it
+ // IMPORTANT NOTE FOR SETTING THE FOLLOWING VALUES: this
+ // counter increments just after every node is analyzed
+ // from the body of the method whose symbol is specified
+ // above.
+ int debugCounter = 0;
+
+ // the value of debugCounter to start reporting the debugCounter
+ // to the screen to let user know what debug iteration we're at
+ int numStartCountReport = 0;
+
+ // the frequency of debugCounter values to print out, 0 no report
+ int freqCountReport = 0;
+
+ // the debugCounter value at which to start taking snapshots
+ int iterStartCapture = 0;
+
+ // the number of snapshots to take
+ int numIterToCapture = 300;
+
+ void debugSnapshot(ReachabilityGraph og, FlatNode fn) {
+ if( debugCounter > iterStartCapture + numIterToCapture ) {
+ return;
+ }
+
+ ++debugCounter;
+ if( debugCounter > numStartCountReport &&
+ freqCountReport > 0 &&
+ debugCounter % freqCountReport == 0 ) {
+ System.out.println(" @@@ debug counter = "+debugCounter);
+ }
+ if( debugCounter > iterStartCapture ) {
+ System.out.println(" @@@ capturing debug "+(debugCounter-iterStartCapture)+" @@@");
+ String graphName = String.format("snap%04d",debugCounter-iterStartCapture);
+ if( fn != null ) {
+ graphName = graphName+fn;
+ }
+ try {
+ og.writeGraph(graphName,
+ true, // write labels (variables)
+ true, // selectively hide intermediate temp vars
+ true, // prune unreachable heap regions
+ false, // show back edges to confirm graph validity
+ false, // show parameter indices (unmaintained!)
+ true, // hide subset reachability states
+ true); // hide edge taints
+ } catch( Exception e ) {
+ System.out.println("Error writing debug capture.");
+ System.exit(0);
+ }
+ }
+
+ if( debugCounter == iterStartCapture + numIterToCapture && stopAfterCapture ) {
+ System.out.println("Stopping analysis after debug captures.");
+ System.exit(0);
+ }
+ }
+ */
+
+
+ // Take in sourceEntry which is the program's compiled entry and
+ // create a new analysis entry, a method that takes no parameters
+ // and appears to allocate the command line arguments and call the
+ // sourceEntry with them. The purpose of this analysis entry is to
+ // provide a top-level method context with no parameters left.
+ private void makeAnalysisEntryMethod( MethodDescriptor sourceEntry ) {
+
+ Modifiers mods = new Modifiers();
+ mods.addModifier( Modifiers.PUBLIC );
+ mods.addModifier( Modifiers.STATIC );
+
+ TypeDescriptor returnType =
+ new TypeDescriptor( TypeDescriptor.VOID );
+
+ this.mdAnalysisEntry =
+ new MethodDescriptor( mods,
+ returnType,
+ "analysisEntryMethod"
+ );
+
+ FlatExit fe = new FlatExit();
+
+ TempDescriptor cmdLineArgs = new TempDescriptor( "args" );
+
+ FlatNew fn = new FlatNew( sourceEntry.getParamType( 0 ),
+ cmdLineArgs,
+ false // is global
+ );
+
+ //FlatCall fc = new
+
+ this.fmAnalysisEntry = new FlatMethod( mdAnalysisEntry, fe );
+ }
+}
diff --git a/Robust/src/Analysis/Disjoint/HeapRegionNode.java b/Robust/src/Analysis/Disjoint/HeapRegionNode.java
new file mode 100644
index 00000000..dbbaf1aa
--- /dev/null
+++ b/Robust/src/Analysis/Disjoint/HeapRegionNode.java
@@ -0,0 +1,234 @@
+package Analysis.Disjoint;
+
+import IR.*;
+import IR.Flat.*;
+import java.util.*;
+
+public class HeapRegionNode extends RefSrcNode {
+
+ protected Integer id;
+
+ protected boolean isSingleObject;
+ protected boolean isFlagged;
+ protected boolean isParameter;
+ protected boolean isNewSummary;
+
+ protected HashSet referencers;
+
+ protected TypeDescriptor type;
+
protected AllocSite allocSite; + + protected ReachSet alpha; + protected ReachSet alphaNew; + + protected String description; + + protected String globalIdentifier; + + + + public HeapRegionNode(Integer id, + boolean isSingleObject, + boolean isFlagged, + boolean isParameter, + boolean isNewSummary, + TypeDescriptor type, + AllocSite allocSite, + ReachSet alpha, + String description, + String globalIdentifier) { + this.id = id; + this.isSingleObject = isSingleObject; + this.isFlagged = isFlagged; + this.isParameter = isParameter; + this.isNewSummary = isNewSummary; + this.type = type; + this.allocSite = allocSite; + this.alpha = alpha; + this.description = description; + this.globalIdentifier = globalIdentifier; + + referencers = new HashSet(); + alphaNew = new ReachSet().makeCanonical(); + } + + public HeapRegionNode copy() { + return new HeapRegionNode(id, + isSingleObject, + isFlagged, + isParameter, + isNewSummary, + type, + allocSite, + alpha, + description, + globalIdentifier); + } + + + public Integer getID() { + return id; + } + + + public boolean equalsIncludingAlpha(HeapRegionNode hrn) { + return equals(hrn) && alpha.equals(hrn.alpha); + } + + + public boolean equals(Object o) { + if( o == null ) { + return false; + } + + if( !( o instanceof HeapRegionNode) ) { + return false; + } + + HeapRegionNode hrn = (HeapRegionNode) o; + + if( !id.equals(hrn.getID() ) ) { + return false; + } + + assert isSingleObject == hrn.isSingleObject(); + assert isFlagged == hrn.isFlagged(); + assert isParameter == hrn.isParameter(); + assert isNewSummary == hrn.isNewSummary(); + assert description.equals(hrn.getDescription() ); + + return true; + } + + public int hashCode() { + return id.intValue()*17; + } + + + public boolean isSingleObject() { + return isSingleObject; + } + + public boolean isFlagged() { + return isFlagged; + } + + public boolean isParameter() { + return isParameter; + } + + public boolean isNewSummary() { + return isNewSummary; + } + + + + public Iterator 
iteratorToReferencers() { + return referencers.iterator(); + } + + public Iterator iteratorToReferencersClone() { + HashSet clone = (HashSet)referencers.clone(); + return clone.iterator(); + } + + public int getNumReferencers() { + return referencers.size(); + } + + + public void addReferencer(RefEdge edge) { + assert edge != null; + + referencers.add(edge); + } + + public void removeReferencer(RefEdge edge) { + assert edge != null; + assert referencers.contains(edge); + + referencers.remove(edge); + } + + public RefEdge getReferenceFrom(RefSrcNode on, + TypeDescriptor type, + String field) { + assert on != null; + + Iterator itrEdge = referencers.iterator(); + while( itrEdge.hasNext() ) { + RefEdge edge = itrEdge.next(); + if( edge.getSrc().equals(on) && + edge.typeEquals(type) && + edge.fieldEquals(field) ) { + return edge; + } + } + + return null; + } + + + public TypeDescriptor getType() { + return type; + } + + public AllocSite getAllocSite() { + return allocSite; + } + + + public void setAlpha(ReachSet alpha) { + this.alpha = alpha; + } + + public ReachSet getAlpha() { + return alpha; + } + + public ReachSet getAlphaNew() { + return alphaNew; + } + + public void setAlphaNew(ReachSet alpha) { + this.alphaNew = alpha; + } + + public void applyAlphaNew() { + assert alphaNew != null; + alpha = alphaNew; + alphaNew = new ReachSet().makeCanonical(); + } + + + public String getIDString() { + String s; + + if( id < 0 ) { + s = "minus" + new Integer(-id).toString(); + } else { + s = id.toString(); + } + + return s; + } + + public String getAlphaString( boolean hideSubsetReachability ) { + return alpha.toStringEscapeNewline(hideSubsetReachability); + } + + public String toString() { + return "HRN"+getIDString(); + } + + // WHY WHY WHY WHY WHY WHY?! 
+ public String getDescription() { + return new String(description); + //return new String( description+" ID "+getIDString() ); + } + + public String getGloballyUniqueIdentifier(){ + return globalIdentifier; + } +} diff --git a/Robust/src/Analysis/Disjoint/ReachGraph.java b/Robust/src/Analysis/Disjoint/ReachGraph.java new file mode 100644 index 00000000..295d988b --- /dev/null +++ b/Robust/src/Analysis/Disjoint/ReachGraph.java @@ -0,0 +1,4926 @@ +package Analysis.Disjoint; + +import IR.*; +import IR.Flat.*; +import Util.UtilAlgorithms; +import java.util.*; +import java.io.*; + +public class ReachGraph { + + protected static final TempDescriptor tdReturn = new TempDescriptor( "_Return___" ); + + /* + // some frequently used reachability constants + protected static final ReachState rstateEmpty = new ReachTupleSet().makeCanonical(); + protected static final ReachSet rsetEmpty = new ReachSet().makeCanonical(); + protected static final ReachSet rsetWithEmptyState = new ReachSet( rstateEmpty ).makeCanonical(); + + public Hashtable id2hrn; + public Hashtable td2vn; + + public HashSet allocSites; + */ + + // use to disable improvements for comparison + protected static final boolean DISABLE_STRONG_UPDATES = false; + protected static final boolean DISABLE_GLOBAL_SWEEP = false; + + protected static int allocationDepth = -1; + protected static TypeUtil typeUtil = null; + protected static boolean debugCallMap = false; + protected static int debugCallMapCount = 0; + protected static String debugCallee = null; + protected static String debugCaller = null; + + + /* + public ReachGraph() { + id2hrn = new Hashtable(); + td2vn = new Hashtable(); + + allocSites = new HashSet(); + } + + + // temp descriptors are globally unique and maps to + // exactly one variable node, easy + protected VariableNode getVariableNodeFromTemp( TempDescriptor td ) { + assert td != null; + + if( !td2vn.containsKey( td ) ) { + td2vn.put( td, new VariableNode( td ) ); + } + + return td2vn.get( td ); + } + 
+ + // the reason for this method is to have the option + // of creating new heap regions with specific IDs, or + // duplicating heap regions with specific IDs (especially + // in the merge() operation) or to create new heap + // regions with a new unique ID + protected HeapRegionNode + createNewHeapRegionNode( Integer id, + boolean isSingleObject, + boolean isNewSummary, + boolean isFlagged, + TypeDescriptor type, + AllocSite allocSite, + ReachSet alpha, + String description, + String globalIdentifier ) { + + boolean markForAnalysis = isFlagged; + + TypeDescriptor typeToUse = null; + if( allocSite != null ) { + typeToUse = allocSite.getType(); + } else { + typeToUse = type; + } + + if( allocSite != null && allocSite.getDisjointAnalysisId() != null ) { + markForAnalysis = true; + } + + if( id == null ) { + id = DisjointAnalysis.generateUniqueHeapRegionNodeID(); + } + + if( alpha == null ) { + if( markForAnalysis ) { + alpha = new ReachSet( + new ReachTuple( id, + !isSingleObject, + ReachTuple.ARITY_ONE + ).makeCanonical() + ).makeCanonical(); + } else { + alpha = new ReachSet( + new ReachState().makeCanonical() + ).makeCanonical(); + } + } + + HeapRegionNode hrn = new HeapRegionNode( id, + isSingleObject, + markForAnalysis, + isNewSummary, + typeToUse, + allocSite, + alpha, + description, + globalIdentifier ); + id2hrn.put( id, hrn ); + return hrn; + } + + + + //////////////////////////////////////////////// + // + // Low-level referencee and referencer methods + // + // These methods provide the lowest level for + // creating references between ownership nodes + // and handling the details of maintaining both + // list of referencers and referencees. 
+ // + //////////////////////////////////////////////// + protected void addRefEdge(RefSrcNode referencer, + HeapRegionNode referencee, + RefEdge edge) { + assert referencer != null; + assert referencee != null; + assert edge != null; + assert edge.getSrc() == referencer; + assert edge.getDst() == referencee; + + referencer.addReferencee(edge); + referencee.addReferencer(edge); + } + + protected void removeRefEdge(RefEdge e) { + removeRefEdge(e.getSrc(), + e.getDst(), + e.getType(), + e.getField() ); + } + + protected void removeRefEdge(RefSrcNode referencer, + HeapRegionNode referencee, + TypeDescriptor type, + String field) { + assert referencer != null; + assert referencee != null; + + RefEdge edge = referencer.getReferenceTo(referencee, + type, + field); + assert edge != null; + assert edge == referencee.getReferenceFrom(referencer, + type, + field); + +// int oldTaint=edge.getTaintIdentifier(); +// if(referencer instanceof HeapRegionNode){ +// depropagateTaintIdentifier((HeapRegionNode)referencer,oldTaint,new HashSet()); +// } + + referencer.removeReferencee(edge); + referencee.removeReferencer(edge); + } + + protected void clearRefEdgesFrom(RefSrcNode referencer, + TypeDescriptor type, + String field, + boolean removeAll) { + assert referencer != null; + + // get a copy of the set to iterate over, otherwise + // we will be trying to take apart the set as we + // are iterating over it, which won't work + Iterator i = referencer.iteratorToReferenceesClone(); + while( i.hasNext() ) { + RefEdge edge = i.next(); + + if( removeAll || + (edge.typeEquals( type ) && edge.fieldEquals( field )) + ){ + + HeapRegionNode referencee = edge.getDst(); + + removeRefEdge(referencer, + referencee, + edge.getType(), + edge.getField() ); + } + } + } + + protected void clearRefEdgesTo(HeapRegionNode referencee, + TypeDescriptor type, + String field, + boolean removeAll) { + assert referencee != null; + + // get a copy of the set to iterate over, otherwise + // we will be trying to 
take apart the set as we + // are iterating over it, which won't work + Iterator i = referencee.iteratorToReferencersClone(); + while( i.hasNext() ) { + RefEdge edge = i.next(); + + if( removeAll || + (edge.typeEquals( type ) && edge.fieldEquals( field )) + ){ + + RefSrcNode referencer = edge.getSrc(); + + removeRefEdge(referencer, + referencee, + edge.getType(), + edge.getField() ); + } + } + } + + + //////////////////////////////////////////////////// + // + // Assignment Operation Methods + // + // These methods are high-level operations for + // modeling program assignment statements using + // the low-level reference create/remove methods + // above. + // + // The destination in an assignment statement is + // going to have new references. The method of + // determining the references depends on the type + // of the FlatNode assignment and the predicates + // of the nodes and edges involved. + // + //////////////////////////////////////////////////// + + public void nullifyDeadVars( Set liveIn ) { + + // make a set of the temps that are out of scope, don't + // consider them when nullifying dead in-scope variables + Set outOfScope = new HashSet(); + outOfScope.add( tdReturn ); + outOfScope.add( tdAliasBlob ); + outOfScope.addAll( paramIndex2tdQ.values() ); + outOfScope.addAll( paramIndex2tdR.values() ); + + Iterator varItr = td2vn.entrySet().iterator(); + while( varItr.hasNext() ) { + Map.Entry me = (Map.Entry) varItr.next(); + TempDescriptor td = (TempDescriptor) me.getKey(); + VariableNode ln = (VariableNode) me.getValue(); + + // if this variable is not out-of-scope or live + // in graph, nullify its references to anything + if( !outOfScope.contains( td ) && + !liveIn.contains( td ) + ) { + clearRefEdgesFrom( ln, null, null, true ); + } + } + } + + + public void assignTempXEqualToTempY( TempDescriptor x, + TempDescriptor y ) { + assignTempXEqualToCastedTempY( x, y, null ); + } + + public void assignTempXEqualToCastedTempY( TempDescriptor x, + TempDescriptor 
y, + TypeDescriptor tdCast ) { + + VariableNode lnX = getVariableNodeFromTemp( x ); + VariableNode lnY = getVariableNodeFromTemp( y ); + + clearRefEdgesFrom( lnX, null, null, true ); + + // note it is possible that the types of temps in the + // flat node to analyze will reveal that some typed + // edges in the reachability graph are impossible + Set impossibleEdges = new HashSet(); + + Iterator itrYhrn = lnY.iteratorToReferencees(); + while( itrYhrn.hasNext() ) { + RefEdge edgeY = itrYhrn.next(); + HeapRegionNode referencee = edgeY.getDst(); + RefEdge edgeNew = edgeY.copy(); + + if( !isSuperiorType( x.getType(), edgeY.getType() ) ) { + impossibleEdges.add( edgeY ); + continue; + } + + edgeNew.setSrc( lnX ); + + edgeNew.setType( mostSpecificType( y.getType(), + tdCast, + edgeY.getType(), + referencee.getType() + ) + ); + + edgeNew.setField( null ); + + addRefEdge( lnX, referencee, edgeNew ); + } + + Iterator itrImp = impossibleEdges.iterator(); + while( itrImp.hasNext() ) { + RefEdge edgeImp = itrImp.next(); + removeRefEdge( edgeImp ); + } + } + + + public void assignTempXEqualToTempYFieldF( TempDescriptor x, + TempDescriptor y, + FieldDescriptor f ) { + VariableNode lnX = getVariableNodeFromTemp( x ); + VariableNode lnY = getVariableNodeFromTemp( y ); + + clearRefEdgesFrom( lnX, null, null, true ); + + // note it is possible that the types of temps in the + // flat node to analyze will reveal that some typed + // edges in the reachability graph are impossible + Set impossibleEdges = new HashSet(); + + Iterator itrYhrn = lnY.iteratorToReferencees(); + while( itrYhrn.hasNext() ) { + RefEdge edgeY = itrYhrn.next(); + HeapRegionNode hrnY = edgeY.getDst(); + ReachSet betaY = edgeY.getBeta(); + + Iterator itrHrnFhrn = hrnY.iteratorToReferencees(); + while( itrHrnFhrn.hasNext() ) { + RefEdge edgeHrn = itrHrnFhrn.next(); + HeapRegionNode hrnHrn = edgeHrn.getDst(); + ReachSet betaHrn = edgeHrn.getBeta(); + + // prune edges that are not a matching field + if( 
edgeHrn.getType() != null && + !edgeHrn.getField().equals( f.getSymbol() ) + ) { + continue; + } + + // check for impossible edges + if( !isSuperiorType( x.getType(), edgeHrn.getType() ) ) { + impossibleEdges.add( edgeHrn ); + continue; + } + + TypeDescriptor tdNewEdge = + mostSpecificType( edgeHrn.getType(), + hrnHrn.getType() + ); + + RefEdge edgeNew = new RefEdge( lnX, + hrnHrn, + tdNewEdge, + null, + false, + betaY.intersection( betaHrn ) + ); + + int newTaintIdentifier=getTaintIdentifierFromHRN(hrnHrn); + edgeNew.setTaintIdentifier(newTaintIdentifier); + + addRefEdge( lnX, hrnHrn, edgeNew ); + } + } + + Iterator itrImp = impossibleEdges.iterator(); + while( itrImp.hasNext() ) { + RefEdge edgeImp = itrImp.next(); + removeRefEdge( edgeImp ); + } + + // anytime you might remove edges between heap regions + // you must global sweep to clean up broken reachability + if( !impossibleEdges.isEmpty() ) { + if( !DISABLE_GLOBAL_SWEEP ) { + globalSweep(); + } + } + } + + + public void assignTempXFieldFEqualToTempY( TempDescriptor x, + FieldDescriptor f, + TempDescriptor y ) { + + VariableNode lnX = getVariableNodeFromTemp( x ); + VariableNode lnY = getVariableNodeFromTemp( y ); + + HashSet nodesWithNewAlpha = new HashSet(); + HashSet edgesWithNewBeta = new HashSet(); + + // note it is possible that the types of temps in the + // flat node to analyze will reveal that some typed + // edges in the reachability graph are impossible + Set impossibleEdges = new HashSet(); + + // first look for possible strong updates and remove those edges + boolean strongUpdate = false; + + Iterator itrXhrn = lnX.iteratorToReferencees(); + while( itrXhrn.hasNext() ) { + RefEdge edgeX = itrXhrn.next(); + HeapRegionNode hrnX = edgeX.getDst(); + + // we can do a strong update here if one of two cases holds + if( f != null && + f != DisjointAnalysis.getArrayField( f.getType() ) && + ( (hrnX.getNumReferencers() == 1) || // case 1 + (hrnX.isSingleObject() && lnX.getNumReferencees() == 1) // case 2 + 
) + ) { + if( !DISABLE_STRONG_UPDATES ) { + strongUpdate = true; + clearRefEdgesFrom( hrnX, f.getType(), f.getSymbol(), false ); + } + } + } + + // then do all token propagation + itrXhrn = lnX.iteratorToReferencees(); + while( itrXhrn.hasNext() ) { + RefEdge edgeX = itrXhrn.next(); + HeapRegionNode hrnX = edgeX.getDst(); + ReachSet betaX = edgeX.getBeta(); + ReachSet R = hrnX.getAlpha().intersection( edgeX.getBeta() ); + + Iterator itrYhrn = lnY.iteratorToReferencees(); + while( itrYhrn.hasNext() ) { + RefEdge edgeY = itrYhrn.next(); + HeapRegionNode hrnY = edgeY.getDst(); + ReachSet O = edgeY.getBeta(); + + // check for impossible edges + if( !isSuperiorType( f.getType(), edgeY.getType() ) ) { + impossibleEdges.add( edgeY ); + continue; + } + + // propagate tokens over nodes starting from hrnSrc, and it will + // take care of propagating back up edges from any touched nodes + ChangeSet Cy = O.unionUpArityToChangeSet( R ); + propagateTokensOverNodes( hrnY, Cy, nodesWithNewAlpha, edgesWithNewBeta ); + + + // then propagate back just up the edges from hrn + ChangeSet Cx = R.unionUpArityToChangeSet(O); + HashSet todoEdges = new HashSet(); + + Hashtable edgePlannedChanges = + new Hashtable(); + + Iterator referItr = hrnX.iteratorToReferencers(); + while( referItr.hasNext() ) { + RefEdge edgeUpstream = referItr.next(); + todoEdges.add( edgeUpstream ); + edgePlannedChanges.put( edgeUpstream, Cx ); + } + + propagateTokensOverEdges( todoEdges, + edgePlannedChanges, + edgesWithNewBeta ); + } + } + + + // apply the updates to reachability + Iterator nodeItr = nodesWithNewAlpha.iterator(); + while( nodeItr.hasNext() ) { + nodeItr.next().applyAlphaNew(); + } + + Iterator edgeItr = edgesWithNewBeta.iterator(); + while( edgeItr.hasNext() ) { + edgeItr.next().applyBetaNew(); + } + + + // then go back through and add the new edges + itrXhrn = lnX.iteratorToReferencees(); + while( itrXhrn.hasNext() ) { + RefEdge edgeX = itrXhrn.next(); + HeapRegionNode hrnX = edgeX.getDst(); + + 
Iterator itrYhrn = lnY.iteratorToReferencees(); + while( itrYhrn.hasNext() ) { + RefEdge edgeY = itrYhrn.next(); + HeapRegionNode hrnY = edgeY.getDst(); + + // skip impossible edges here, we already marked them + // when computing reachability propagations above + if( !isSuperiorType( f.getType(), edgeY.getType() ) ) { + continue; + } + + // prepare the new reference edge hrnX.f -> hrnY + TypeDescriptor tdNewEdge = + mostSpecificType( y.getType(), + edgeY.getType(), + hrnY.getType() + ); + + RefEdge edgeNew = new RefEdge( hrnX, + hrnY, + tdNewEdge, + f.getSymbol(), + false, + edgeY.getBeta().pruneBy( hrnX.getAlpha() ) + ); + + // look to see if an edge with same field exists + // and merge with it, otherwise just add the edge + RefEdge edgeExisting = hrnX.getReferenceTo( hrnY, + tdNewEdge, + f.getSymbol() ); + + if( edgeExisting != null ) { + edgeExisting.setBeta( + edgeExisting.getBeta().union( edgeNew.getBeta() ) + ); + + if((!hrnX.isParameter() && hrnY.isParameter()) || ( hrnX.isParameter() && hrnY.isParameter())){ + int newTaintIdentifier=getTaintIdentifierFromHRN(hrnY); + edgeExisting.unionTaintIdentifier(newTaintIdentifier); + } + // a new edge here cannot be reflexive, so existing will + // always be also not reflexive anymore + edgeExisting.setIsInitialParam( false ); + } else { + + if((!hrnX.isParameter() && hrnY.isParameter()) || ( hrnX.isParameter() && hrnY.isParameter())){ + int newTaintIdentifier=getTaintIdentifierFromHRN(hrnY); + edgeNew.setTaintIdentifier(newTaintIdentifier); + } + //currently, taint isn't propagated through the chain of refrences + //propagateTaintIdentifier(hrnX,newTaintIdentifier,new HashSet()); + + addRefEdge( hrnX, hrnY, edgeNew ); + } + } + } + + Iterator itrImp = impossibleEdges.iterator(); + while( itrImp.hasNext() ) { + RefEdge edgeImp = itrImp.next(); + removeRefEdge( edgeImp ); + } + + // if there was a strong update, make sure to improve + // reachability with a global sweep + if( strongUpdate || 
!impossibleEdges.isEmpty() ) { + if( !DISABLE_GLOBAL_SWEEP ) { + globalSweep(); + } + } + } + + + // the parameter model is to use a single-object heap region + // for the primary parameter, and a multiple-object heap + // region for the secondary objects reachable through the + // primary object, if necessary + public void assignTempEqualToParamAlloc( TempDescriptor td, + boolean isTask, + Integer paramIndex, FlatMethod fm ) { + assert td != null; + + TypeDescriptor typeParam = td.getType(); + assert typeParam != null; + + // either the parameter is an array or a class to be in this method + assert typeParam.isArray() || typeParam.isClass(); + + // discover some info from the param type and use it below + // to get parameter model as precise as we can + boolean createSecondaryRegion = false; + Set primary2primaryFields = new HashSet(); + Set primary2secondaryFields = new HashSet(); + + // there might be an element reference for array types + if( typeParam.isArray() ) { + // only bother with this if the dereferenced type can + // affect reachability + TypeDescriptor typeDeref = typeParam.dereference(); + if( !typeDeref.isImmutable() || typeDeref.isArray() ) { + primary2secondaryFields.add( + DisjointAnalysis.getArrayField( typeDeref ) + ); + createSecondaryRegion = true; + + // also handle a special case where an array of objects + // can point back to the array, which is an object! 
+ if( typeParam.toPrettyString().equals( "Object[]" ) && + typeDeref.toPrettyString().equals( "Object" ) ) { + + primary2primaryFields.add( + DisjointAnalysis.getArrayField( typeDeref ) + ); + } + } + } + + // there might be member references for class types + if( typeParam.isClass() ) { + ClassDescriptor cd = typeParam.getClassDesc(); + while( cd != null ) { + + Iterator fieldItr = cd.getFields(); + while( fieldItr.hasNext() ) { + + FieldDescriptor fd = (FieldDescriptor) fieldItr.next(); + TypeDescriptor typeField = fd.getType(); + assert typeField != null; + + if( !typeField.isImmutable() || typeField.isArray() ) { + primary2secondaryFields.add( fd ); + createSecondaryRegion = true; + } + + if( typeUtil.isSuperorType( typeField, typeParam ) ) { + primary2primaryFields.add( fd ); + } + } + + cd = cd.getSuperDesc(); + } + } + + + // now build everything we need + VariableNode lnParam = getVariableNodeFromTemp( td ); + HeapRegionNode hrnPrimary = createNewHeapRegionNode( null, // id or null to generate a new one + true, // single object? + false, // summary? + false, // flagged? + true, // is a parameter? 
+ typeParam, // type + null, // allocation site + null, // reachability set + "param"+paramIndex+" obj", + generateUniqueIdentifier(fm,paramIndex,"P")); + + parameterTemps.add( td ); + parameterLabels.add( lnParam ); + + + // this is a non-program-accessible label that picks up beta + // info to be used for fixing a caller of this method + TempDescriptor tdParamQ = new TempDescriptor( td+qString ); + paramIndex2tdQ.put( paramIndex, tdParamQ ); + VariableNode lnParamQ = getVariableNodeFromTemp( tdParamQ ); + + outOfScopeTemps.add( tdParamQ ); + outOfScopeLabels.add( lnParamQ ); + + // keep track of heap regions that were created for + // parameter labels, the index of the parameter they + // are for is important when resolving method calls + Integer newPrimaryID = hrnPrimary.getID(); + assert !idPrimary2paramIndexSet.containsKey( newPrimaryID ); + Set s = new HashSet(); + s.add( paramIndex ); + idPrimary2paramIndexSet.put( newPrimaryID, s ); + paramIndex2idPrimary.put( paramIndex, newPrimaryID ); + + ReachTuple ttPrimary = new ReachTuple( newPrimaryID, + false, // multi-object + ReachTuple.ARITY_ONE ).makeCanonical(); + + + HeapRegionNode hrnSecondary = null; + Integer newSecondaryID = null; + ReachTuple ttSecondary = null; + TempDescriptor tdParamR = null; + VariableNode lnParamR = null; + + if( createSecondaryRegion ) { + tdParamR = new TempDescriptor( td+rString ); + paramIndex2tdR.put( paramIndex, tdParamR ); + lnParamR = getVariableNodeFromTemp( tdParamR ); + + outOfScopeTemps.add( tdParamR ); + outOfScopeLabels.add( lnParamR ); + + hrnSecondary = createNewHeapRegionNode( null, // id or null to generate a new one + false, // single object? + false, // summary? + false, // flagged? + true, // is a parameter? 
+ null, // type + null, // allocation site + null, // reachability set + "param"+paramIndex+" reachable", + generateUniqueIdentifier(fm,paramIndex,"S")); + + newSecondaryID = hrnSecondary.getID(); + assert !idSecondary2paramIndexSet.containsKey( newSecondaryID ); + Set s2 = new HashSet(); + s2.add( paramIndex ); + idSecondary2paramIndexSet.put( newSecondaryID, s2 ); + paramIndex2idSecondary.put( paramIndex, newSecondaryID ); + + + ttSecondary = new ReachTuple( newSecondaryID, + true, // multi-object + ReachTuple.ARITY_ONE ).makeCanonical(); + } + + // use a beta that has everything and put it all over the + // parameter model, then use a global sweep later to fix + // it up, since parameters can have different shapes + ReachState tts0 = new ReachTupleSet( ttPrimary ).makeCanonical(); + ReachSet betaSoup; + if( createSecondaryRegion ) { + ReachState tts1 = new ReachTupleSet( ttSecondary ).makeCanonical(); + ReachState tts2 = new ReachTupleSet( ttPrimary ).makeCanonical().union( ttSecondary ); + betaSoup = ReachSet.factory( tts0 ).union( tts1 ).union( tts2 ); + } else { + betaSoup = ReachSet.factory( tts0 ); + } + + RefEdge edgeFromLabel = + new RefEdge( lnParam, // src + hrnPrimary, // dst + typeParam, // type + null, // field + false, // special param initial (not needed on label->node) + betaSoup ); // reachability + edgeFromLabel.tainedBy(paramIndex); + addRefEdge( lnParam, hrnPrimary, edgeFromLabel ); + + RefEdge edgeFromLabelQ = + new RefEdge( lnParamQ, // src + hrnPrimary, // dst + null, // type + null, // field + false, // special param initial (not needed on label->node) + betaSoup ); // reachability + edgeFromLabelQ.tainedBy(paramIndex); + addRefEdge( lnParamQ, hrnPrimary, edgeFromLabelQ ); + + RefEdge edgeSecondaryReflexive; + if( createSecondaryRegion ) { + edgeSecondaryReflexive = + new RefEdge( hrnSecondary, // src + hrnSecondary, // dst + null, // match all types + null, // match all fields + true, // special param initial + betaSoup ); // reachability 
+ addRefEdge( hrnSecondary, hrnSecondary, edgeSecondaryReflexive ); + + RefEdge edgeSecondary2Primary = + new RefEdge( hrnSecondary, // src + hrnPrimary, // dst + null, // match all types + null, // match all fields + true, // special param initial + betaSoup ); // reachability + addRefEdge( hrnSecondary, hrnPrimary, edgeSecondary2Primary ); + + RefEdge edgeFromLabelR = + new RefEdge( lnParamR, // src + hrnSecondary, // dst + null, // type + null, // field + false, // special param initial (not needed on label->node) + betaSoup ); // reachability + edgeFromLabelR.tainedBy(paramIndex); + addRefEdge( lnParamR, hrnSecondary, edgeFromLabelR ); + } + + Iterator fieldItr = primary2primaryFields.iterator(); + while( fieldItr.hasNext() ) { + FieldDescriptor fd = fieldItr.next(); + + RefEdge edgePrimaryReflexive = + new RefEdge( hrnPrimary, // src + hrnPrimary, // dst + fd.getType(), // type + fd.getSymbol(), // field + true, // special param initial + betaSoup ); // reachability + addRefEdge( hrnPrimary, hrnPrimary, edgePrimaryReflexive ); + } + + fieldItr = primary2secondaryFields.iterator(); + while( fieldItr.hasNext() ) { + FieldDescriptor fd = fieldItr.next(); + + RefEdge edgePrimary2Secondary = + new RefEdge( hrnPrimary, // src + hrnSecondary, // dst + fd.getType(), // type + fd.getSymbol(), // field + true, // special param initial + betaSoup ); // reachability + addRefEdge( hrnPrimary, hrnSecondary, edgePrimary2Secondary ); + } + } + + + public void makeAliasedParamHeapRegionNode(FlatMethod fm) { + + VariableNode lnBlob = getVariableNodeFromTemp( tdAliasBlob ); + + outOfScopeTemps.add( tdAliasBlob ); + outOfScopeLabels.add( lnBlob ); + + HeapRegionNode hrn = createNewHeapRegionNode( null, // id or null to generate a new one + false, // single object? + false, // summary? + false, // flagged? + true, // is a parameter? 
+ null, // type + null, // allocation site + null, // reachability set + "aliasedParams", + generateUniqueIdentifier(fm,0,"A")); + + + ReachSet beta = new ReachSet( new ReachTuple( hrn.getID(), + true, + ReachTuple.ARITY_ONE).makeCanonical() + ).makeCanonical(); + + RefEdge edgeFromLabel = + new RefEdge( lnBlob, hrn, null, null, false, beta ); + + RefEdge edgeReflexive = + new RefEdge( hrn, hrn, null, null, true, beta ); + + addRefEdge( lnBlob, hrn, edgeFromLabel ); + addRefEdge( hrn, hrn, edgeReflexive ); + } + + + public void assignTempEqualToAliasedParam( TempDescriptor tdParam, + Integer paramIndex, FlatMethod fm ) { + assert tdParam != null; + + TypeDescriptor typeParam = tdParam.getType(); + assert typeParam != null; + + VariableNode lnParam = getVariableNodeFromTemp( tdParam ); + VariableNode lnAliased = getVariableNodeFromTemp( tdAliasBlob ); + + parameterTemps.add( tdParam ); + parameterLabels.add( lnParam ); + + // this is a non-program-accessible label that picks up beta + // info to be used for fixing a caller of this method + TempDescriptor tdParamQ = new TempDescriptor( tdParam+qString ); + TempDescriptor tdParamR = new TempDescriptor( tdParam+rString ); + + paramIndex2tdQ.put( paramIndex, tdParamQ ); + paramIndex2tdR.put( paramIndex, tdParamR ); + + VariableNode lnParamQ = getVariableNodeFromTemp( tdParamQ ); + VariableNode lnParamR = getVariableNodeFromTemp( tdParamR ); + + outOfScopeTemps.add( tdParamR ); + outOfScopeLabels.add( lnParamR ); + outOfScopeTemps.add( tdParamQ ); + outOfScopeLabels.add( lnParamQ ); + + // the lnAliased should always only reference one node, and that + // heap region node is the aliased param blob + assert lnAliased.getNumReferencees() == 1; + HeapRegionNode hrnAliasBlob = lnAliased.iteratorToReferencees().next().getDst(); + Integer idAliased = hrnAliasBlob.getID(); + + + ReachTuple ttAliased = new ReachTuple( idAliased, + true, // multi-object + ReachTuple.ARITY_ONE ).makeCanonical(); + + + HeapRegionNode hrnPrimary = 
createNewHeapRegionNode( null, // id or null to generate a new one + true, // single object? + false, // summary? + false, // flagged? + true, // is a parameter? + typeParam, // type + null, // allocation site + null, // reachability set + "param"+paramIndex+" obj", + generateUniqueIdentifier(fm, paramIndex.intValue(), "P")); + + Integer newPrimaryID = hrnPrimary.getID(); + assert !idPrimary2paramIndexSet.containsKey( newPrimaryID ); + Set s1 = new HashSet(); + s1.add( paramIndex ); + idPrimary2paramIndexSet.put( newPrimaryID, s1 ); + paramIndex2idPrimary.put( paramIndex, newPrimaryID ); + + Set s2 = idSecondary2paramIndexSet.get( idAliased ); + if( s2 == null ) { + s2 = new HashSet(); + } + s2.add( paramIndex ); + idSecondary2paramIndexSet.put( idAliased, s2 ); + paramIndex2idSecondary.put( paramIndex, idAliased ); + + + + ReachTuple ttPrimary = new ReachTuple( newPrimaryID, + false, // multi-object + ReachTuple.ARITY_ONE ).makeCanonical(); + + + ReachState tts0 = new ReachTupleSet( ttPrimary ).makeCanonical(); + ReachState tts1 = new ReachTupleSet( ttAliased ).makeCanonical(); + ReachState tts2 = new ReachTupleSet( ttPrimary ).makeCanonical().union( ttAliased ); + ReachSet betaSoup = ReachSet.factory( tts0 ).union( tts1 ).union( tts2 ); + + + RefEdge edgeFromLabel = + new RefEdge( lnParam, // src + hrnPrimary, // dst + typeParam, // type + null, // field + false, // special param initial (not needed on label->node) + betaSoup ); // reachability + edgeFromLabel.tainedBy(paramIndex); + addRefEdge( lnParam, hrnPrimary, edgeFromLabel ); + + RefEdge edgeFromLabelQ = + new RefEdge( lnParamQ, // src + hrnPrimary, // dst + null, // type + null, // field + false, // special param initial (not needed on label->node) + betaSoup ); // reachability + edgeFromLabelQ.tainedBy(paramIndex); + addRefEdge( lnParamQ, hrnPrimary, edgeFromLabelQ ); + + RefEdge edgeAliased2Primary = + new RefEdge( hrnAliasBlob, // src + hrnPrimary, // dst + null, // match all types + null, // match all 
fields + true, // special param initial + betaSoup ); // reachability + addRefEdge( hrnAliasBlob, hrnPrimary, edgeAliased2Primary ); + + RefEdge edgeFromLabelR = + new RefEdge( lnParamR, // src + hrnAliasBlob, // dst + null, // type + null, // field + false, // special param initial (not needed on label->node) + betaSoup ); // reachability + edgeFromLabelR.tainedBy(paramIndex); + addRefEdge( lnParamR, hrnAliasBlob, edgeFromLabelR ); + } + + + public void addParam2ParamAliasEdges( FlatMethod fm, + Set aliasedParamIndices ) { + + VariableNode lnAliased = getVariableNodeFromTemp( tdAliasBlob ); + + // the lnAliased should always only reference one node, and that + // heap region node is the aliased param blob + assert lnAliased.getNumReferencees() == 1; + HeapRegionNode hrnAliasBlob = lnAliased.iteratorToReferencees().next().getDst(); + Integer idAliased = hrnAliasBlob.getID(); + + + ReachTuple ttAliased = new ReachTuple( idAliased, + true, // multi-object + ReachTuple.ARITY_ONE ).makeCanonical(); + + + Iterator apItrI = aliasedParamIndices.iterator(); + while( apItrI.hasNext() ) { + Integer i = apItrI.next(); + TempDescriptor tdParamI = fm.getParameter( i ); + TypeDescriptor typeI = tdParamI.getType(); + VariableNode lnParamI = getVariableNodeFromTemp( tdParamI ); + + Integer idPrimaryI = paramIndex2idPrimary.get( i ); + assert idPrimaryI != null; + HeapRegionNode primaryI = id2hrn.get( idPrimaryI ); + assert primaryI != null; + + ReachTuple ttPrimaryI = new ReachTuple( idPrimaryI, + false, // multi-object + ReachTuple.ARITY_ONE ).makeCanonical(); + + ReachState ttsI = new ReachTupleSet( ttPrimaryI ).makeCanonical(); + ReachState ttsA = new ReachTupleSet( ttAliased ).makeCanonical(); + ReachState ttsIA = new ReachTupleSet( ttPrimaryI ).makeCanonical().union( ttAliased ); + ReachSet betaSoup = ReachSet.factory( ttsI ).union( ttsA ).union( ttsIA ); + + + // calculate whether fields of this aliased parameter are able to + // reference its own primary object, the blob, 
or other parameter's + // primary objects! + Set primary2primaryFields = new HashSet(); + Set primary2secondaryFields = new HashSet(); + + // there might be an element reference for array types + if( typeI.isArray() ) { + // only bother with this if the dereferenced type can + // affect reachability + TypeDescriptor typeDeref = typeI.dereference(); + + + + ///////////////////////////////////////////////////////////// + // NOTE! For the KMeans benchmark a parameter of type float + // array, which has an immutable dereferenced type, is causing + // this assertion to fail. I'm commenting it out for now which + // is safe, because it allows aliasing where no aliasing can occur, + // so it can only get a worse-but-not-wrong answer. FIX! + ///////////////////////////////////////////////////////////// + // for this parameter to be aliased the following must be true + //assert !typeDeref.isImmutable() || typeDeref.isArray(); + + + + primary2secondaryFields.add( + DisjointAnalysis.getArrayField( typeDeref ) + ); + + // also handle a special case where an array of objects + // can point back to the array, which is an object! 
+ if( typeI .toPrettyString().equals( "Object[]" ) && + typeDeref.toPrettyString().equals( "Object" ) ) { + primary2primaryFields.add( + DisjointAnalysis.getArrayField( typeDeref ) + ); + } + } + + // there might be member references for class types + if( typeI.isClass() ) { + ClassDescriptor cd = typeI.getClassDesc(); + while( cd != null ) { + + Iterator fieldItr = cd.getFields(); + while( fieldItr.hasNext() ) { + + FieldDescriptor fd = (FieldDescriptor) fieldItr.next(); + TypeDescriptor typeField = fd.getType(); + assert typeField != null; + + if( !typeField.isImmutable() || typeField.isArray() ) { + primary2secondaryFields.add( fd ); + } + + if( typeUtil.isSuperorType( typeField, typeI ) ) { + primary2primaryFields.add( fd ); + } + } + + cd = cd.getSuperDesc(); + } + } + + Iterator fieldItr = primary2primaryFields.iterator(); + while( fieldItr.hasNext() ) { + FieldDescriptor fd = fieldItr.next(); + + RefEdge edgePrimaryReflexive = + new RefEdge( primaryI, // src + primaryI, // dst + fd.getType(), // type + fd.getSymbol(), // field + true, // special param initial + betaSoup ); // reachability + addRefEdge( primaryI, primaryI, edgePrimaryReflexive ); + } + + fieldItr = primary2secondaryFields.iterator(); + while( fieldItr.hasNext() ) { + FieldDescriptor fd = fieldItr.next(); + TypeDescriptor typeField = fd.getType(); + assert typeField != null; + + RefEdge edgePrimary2Secondary = + new RefEdge( primaryI, // src + hrnAliasBlob, // dst + fd.getType(), // type + fd.getSymbol(), // field + true, // special param initial + betaSoup ); // reachability + addRefEdge( primaryI, hrnAliasBlob, edgePrimary2Secondary ); + + // ask whether these fields might match any of the other aliased + // parameters and make those edges too + Iterator apItrJ = aliasedParamIndices.iterator(); + while( apItrJ.hasNext() ) { + Integer j = apItrJ.next(); + TempDescriptor tdParamJ = fm.getParameter( j ); + TypeDescriptor typeJ = tdParamJ.getType(); + + if( !i.equals( j ) && 
typeUtil.isSuperorType( typeField, typeJ ) ) { + + Integer idPrimaryJ = paramIndex2idPrimary.get( j ); + assert idPrimaryJ != null; + HeapRegionNode primaryJ = id2hrn.get( idPrimaryJ ); + assert primaryJ != null; + + ReachTuple ttPrimaryJ = new ReachTuple( idPrimaryJ, + false, // multi-object + ReachTuple.ARITY_ONE ).makeCanonical(); + + ReachState ttsJ = new ReachTupleSet( ttPrimaryJ ).makeCanonical(); + ReachState ttsIJ = ttsI.union( ttsJ ); + ReachState ttsAJ = ttsA.union( ttsJ ); + ReachState ttsIAJ = ttsIA.union( ttsJ ); + ReachSet betaSoupWJ = ReachSet.factory( ttsJ ).union( ttsIJ ).union( ttsAJ ).union( ttsIAJ ); + + RefEdge edgePrimaryI2PrimaryJ = + new RefEdge( primaryI, // src + primaryJ, // dst + fd.getType(), // type + fd.getSymbol(), // field + true, // special param initial + betaSoupWJ ); // reachability + addRefEdge( primaryI, primaryJ, edgePrimaryI2PrimaryJ ); + } + } + } + + + // look at whether aliased parameters i and j can + // possibly be the same primary object, add edges + Iterator apItrJ = aliasedParamIndices.iterator(); + while( apItrJ.hasNext() ) { + Integer j = apItrJ.next(); + TempDescriptor tdParamJ = fm.getParameter( j ); + TypeDescriptor typeJ = tdParamJ.getType(); + VariableNode lnParamJ = getVariableNodeFromTemp( tdParamJ ); + + if( !i.equals( j ) && typeUtil.isSuperorType( typeI, typeJ ) ) { + + Integer idPrimaryJ = paramIndex2idPrimary.get( j ); + assert idPrimaryJ != null; + HeapRegionNode primaryJ = id2hrn.get( idPrimaryJ ); + assert primaryJ != null; + + RefEdge lnJ2PrimaryJ = lnParamJ.getReferenceTo( primaryJ, + tdParamJ.getType(), + null ); + assert lnJ2PrimaryJ != null; + + RefEdge lnI2PrimaryJ = lnJ2PrimaryJ.copy(); + lnI2PrimaryJ.setSrc( lnParamI ); + lnI2PrimaryJ.setType( tdParamI.getType() ); + lnI2PrimaryJ.tainedBy(new Integer(j)); + addRefEdge( lnParamI, primaryJ, lnI2PrimaryJ ); + } + } + } + } + + public void prepareParamTokenMaps( FlatMethod fm ) { + + // always add the bogus mappings that are used to + // rewrite 
"with respect to no parameter" + paramTokenPrimary2paramIndex.put( bogusToken, bogusIndex ); + paramIndex2paramTokenPrimary.put( bogusIndex, bogusToken ); + + paramTokenSecondary2paramIndex.put( bogusToken, bogusIndex ); + paramIndex2paramTokenSecondary.put( bogusIndex, bogusToken ); + paramTokenSecondaryPlus2paramIndex.put( bogusTokenPlus, bogusIndex ); + paramIndex2paramTokenSecondaryPlus.put( bogusIndex, bogusTokenPlus ); + paramTokenSecondaryStar2paramIndex.put( bogusTokenStar, bogusIndex ); + paramIndex2paramTokenSecondaryStar.put( bogusIndex, bogusTokenStar ); + + for( int i = 0; i < fm.numParameters(); ++i ) { + Integer paramIndex = new Integer( i ); + + // immutable objects have no primary regions + if( paramIndex2idPrimary.containsKey( paramIndex ) ) { + Integer idPrimary = paramIndex2idPrimary.get( paramIndex ); + + assert id2hrn.containsKey( idPrimary ); + HeapRegionNode hrnPrimary = id2hrn.get( idPrimary ); + + ReachTuple p_i = new ReachTuple( hrnPrimary.getID(), + false, // multiple-object? + ReachTuple.ARITY_ONE ).makeCanonical(); + paramTokenPrimary2paramIndex.put( p_i, paramIndex ); + paramIndex2paramTokenPrimary.put( paramIndex, p_i ); + } + + // any parameter object, by type, may have no secondary region + if( paramIndex2idSecondary.containsKey( paramIndex ) ) { + Integer idSecondary = paramIndex2idSecondary.get( paramIndex ); + + assert id2hrn.containsKey( idSecondary ); + HeapRegionNode hrnSecondary = id2hrn.get( idSecondary ); + + ReachTuple s_i = new ReachTuple( hrnSecondary.getID(), + true, // multiple-object? + ReachTuple.ARITY_ONE ).makeCanonical(); + paramTokenSecondary2paramIndex.put( s_i, paramIndex ); + paramIndex2paramTokenSecondary.put( paramIndex, s_i ); + + ReachTuple s_i_plus = new ReachTuple( hrnSecondary.getID(), + true, // multiple-object? 
                                              ReachTuple.ARITY_ONEORMORE ).makeCanonical();
	paramTokenSecondaryPlus2paramIndex.put( s_i_plus, paramIndex );
	paramIndex2paramTokenSecondaryPlus.put( paramIndex, s_i_plus );

	// the ZEROORMORE variant of the same secondary token, for
	// rewrites that may also match zero copies of the token
	ReachTuple s_i_star = new ReachTuple( hrnSecondary.getID(),
					      true, // multiple-object?
					      ReachTuple.ARITY_ZEROORMORE ).makeCanonical();
	paramTokenSecondaryStar2paramIndex.put( s_i_star, paramIndex );
	paramIndex2paramTokenSecondaryStar.put( paramIndex, s_i_star );
      }
    }
  }



  // Model the statement "return x;": the special return variable node
  // (tdReturn) is strongly updated to reference exactly the heap
  // regions x references, via copies of x's out-edges.
  // NOTE(review): generic type parameters (e.g. Iterator<RefEdge>)
  // appear to have been stripped from this text by extraction; the
  // raw types below are reproduced as found.
  public void assignReturnEqualToTemp(TempDescriptor x) {

    VariableNode lnR = getVariableNodeFromTemp(tdReturn);
    VariableNode lnX = getVariableNodeFromTemp(x);

    // strong update: the return label first drops all existing edges
    clearRefEdgesFrom(lnR, null, null, true);

    Iterator itrXhrn = lnX.iteratorToReferencees();
    while( itrXhrn.hasNext() ) {
      RefEdge edgeX = itrXhrn.next();
      HeapRegionNode referencee = edgeX.getDst();

      // duplicate x's edge with the return label as its new source
      RefEdge edgeNew = edgeX.copy();
      edgeNew.setSrc(lnR);

      addRefEdge(lnR, referencee, edgeNew);
    }
  }


  // Model "x = new ..." at allocation site as: age the site's region
  // chain so the zero-ith oldest node is fresh, then strongly update
  // x to reference only that node.
  public void assignTempEqualToNewAlloc(TempDescriptor x,
                                        AllocSite as) {
    assert x != null;
    assert as != null;

    age( as );

    // after the age operation the newest (or zero-ith oldest)
    // node associated with the allocation site should have
    // no references to it as if it were a newly allocated
    // heap region
    Integer idNewest = as.getIthOldest( 0 );
    HeapRegionNode hrnNewest = id2hrn.get( idNewest );
    assert hrnNewest != null;

    // strong update: x drops all existing edges
    VariableNode lnX = getVariableNodeFromTemp( x );
    clearRefEdgesFrom( lnX, null, null, true );

    // make a new reference to allocated node
    TypeDescriptor type = as.getType();
    RefEdge edgeNew =
      new RefEdge( lnX,                  // source
                   hrnNewest,            // dest
                   type,                 // type
                   null,                 // field name
                   false,                // is initial param
                   hrnNewest.getAlpha()  // beta
                   );

    addRefEdge( lnX, hrnNewest, edgeNew );
  }


  // use the allocation site (unique to entire analysis) to
  // locate the heap region nodes in this ownership graph
  // that should be aged.
The process models the allocation + // of new objects and collects all the oldest allocations + // in a summary node to allow for a finite analysis + // + // There is an additional property of this method. After + // running it on a particular ownership graph (many graphs + // may have heap regions related to the same allocation site) + // the heap region node objects in this ownership graph will be + // allocated. Therefore, after aging a graph for an allocation + // site, attempts to retrieve the heap region nodes using the + // integer id's contained in the allocation site should always + // return non-null heap regions. + public void age(AllocSite as) { + + // aging adds this allocation site to the graph's + // list of sites that exist in the graph, or does + // nothing if the site is already in the list + allocSites.add(as); + + // get the summary node for the allocation site in the context + // of this particular ownership graph + HeapRegionNode hrnSummary = getSummaryNode(as); + + // merge oldest node into summary + Integer idK = as.getOldest(); + HeapRegionNode hrnK = id2hrn.get(idK); + mergeIntoSummary(hrnK, hrnSummary); + + // move down the line of heap region nodes + // clobbering the ith and transferring all references + // to and from i-1 to node i. 
// Note that this clobbers
    // the oldest node (hrnK) that was just merged into
    // the summary
    for( int i = allocationDepth - 1; i > 0; --i ) {

      // move references from the i-1 oldest to the ith oldest
      Integer idIth = as.getIthOldest(i);
      HeapRegionNode hrnI = id2hrn.get(idIth);
      Integer idImin1th = as.getIthOldest(i - 1);
      HeapRegionNode hrnImin1 = id2hrn.get(idImin1th);

      transferOnto(hrnImin1, hrnI);
    }

    // as stated above, the newest node should have had its
    // references moved over to the second oldest, so we wipe newest
    // in preparation for being the new object to assign something to
    Integer id0th = as.getIthOldest(0);
    HeapRegionNode hrn0 = id2hrn.get(id0th);
    assert hrn0 != null;

    // clear all references in and out of newest node
    clearRefEdgesFrom(hrn0, null, null, true);
    clearRefEdgesTo(hrn0, null, null, true);


    // now tokens in reachability sets need to "age" also:
    // walk every out-edge of every variable node...
    Iterator itrAllVariableNodes = td2vn.entrySet().iterator();
    while( itrAllVariableNodes.hasNext() ) {
      Map.Entry me = (Map.Entry)itrAllVariableNodes.next();
      VariableNode ln = (VariableNode) me.getValue();

      Iterator itrEdges = ln.iteratorToReferencees();
      while( itrEdges.hasNext() ) {
	ageTokens(as, itrEdges.next() );
      }
    }

    // ...and every heap region node plus its out-edges
    Iterator itrAllHRNodes = id2hrn.entrySet().iterator();
    while( itrAllHRNodes.hasNext() ) {
      Map.Entry me = (Map.Entry)itrAllHRNodes.next();
      HeapRegionNode hrnToAge = (HeapRegionNode) me.getValue();

      ageTokens(as, hrnToAge);

      Iterator itrEdges = hrnToAge.iteratorToReferencees();
      while( itrEdges.hasNext() ) {
	ageTokens(as, itrEdges.next() );
      }
    }


    // after tokens have been aged, reset newest node's reachability:
    // a flagged region starts with the token for itself, otherwise
    // it starts with the empty reach state
    if( hrn0.isFlagged() ) {
      hrn0.setAlpha(new ReachSet(
                      new ReachState(
                        new ReachTuple(hrn0).makeCanonical()
                        ).makeCanonical()
                      ).makeCanonical()
                    );
    } else {
      hrn0.setAlpha(new ReachSet(
                      new ReachState().makeCanonical()
                      ).makeCanonical()
                    );
    }
  }


  protected HeapRegionNode
getSummaryNode(AllocSite as) {

    // retrieve, or lazily build, the summary heap region node for
    // allocation site as in the context of this graph
    Integer idSummary = as.getSummary();
    HeapRegionNode hrnSummary = id2hrn.get(idSummary);

    // If this is null then we haven't touched this allocation site
    // in the context of the current ownership graph, so allocate
    // heap region nodes appropriate for the entire allocation site.
    // This should only happen once per ownership graph per allocation site,
    // and a particular integer id can be used to locate the heap region
    // in different ownership graphs that represents the same part of an
    // allocation site.
    if( hrnSummary == null ) {

      boolean hasFlags = false;
      if( as.getType().isClass() ) {
	hasFlags = as.getType().getClassDesc().hasFlags();
      }

      // an explicitly flagged site also forces flagged regions;
      // under this guard the assignment is equivalent to
      // hasFlags = true
      if(as.getFlag()){
	hasFlags=as.getFlag();
      }

      hrnSummary = createNewHeapRegionNode(idSummary,    // id or null to generate a new one
					   false,        // single object?
					   true,         // summary?
					   hasFlags,     // flagged?
					   false,        // is a parameter?
					   as.getType(), // type
					   as,           // allocation site
					   null,         // reachability set
					   as.toStringForDOT() + "\\nsummary",
					   generateUniqueIdentifier(as,0,true));

      // also build the fixed-depth chain of single-object i-th oldest
      // nodes for this site, newest (0) through oldest
      for( int i = 0; i < as.getAllocationDepth(); ++i ) {
	Integer idIth = as.getIthOldest(i);
	assert !id2hrn.containsKey(idIth);
	createNewHeapRegionNode(idIth,        // id or null to generate a new one
				true,         // single object?
				false,        // summary?
				hasFlags,     // flagged?
				false,        // is a parameter?
				as.getType(), // type
				as,           // allocation site
				null,         // reachability set
				as.toStringForDOT() + "\\n" + i + " oldest",
				generateUniqueIdentifier(as,i,false));
      }
    }

    return hrnSummary;
  }


  // Shadow analogue of getSummaryNode: lazily builds the shadow
  // summary and shadow i-th oldest regions for an allocation site.
  // NOTE(review): unlike getSummaryNode, hasFlags here does NOT
  // consult as.getFlag() -- confirm whether shadow regions should
  // honor the site's explicit flag as well.
  protected HeapRegionNode getShadowSummaryNode(AllocSite as) {

    Integer idShadowSummary = as.getSummaryShadow();
    HeapRegionNode hrnShadowSummary = id2hrn.get(idShadowSummary);

    if( hrnShadowSummary == null ) {

      boolean hasFlags = false;
      if( as.getType().isClass() ) {
	hasFlags = as.getType().getClassDesc().hasFlags();
      }

      hrnShadowSummary = createNewHeapRegionNode(idShadowSummary, // id or null to generate a new one
						 false,           // single object?
						 true,            // summary?
						 hasFlags,        // flagged?
						 false,           // is a parameter?
						 as.getType(),    // type
						 as,              // allocation site
						 null,            // reachability set
						 as + "\\n" + as.getType() + "\\nshadowSum",
						 "");

      for( int i = 0; i < as.getAllocationDepth(); ++i ) {
	Integer idShadowIth = as.getIthOldestShadow(i);
	assert !id2hrn.containsKey(idShadowIth);
	createNewHeapRegionNode(idShadowIth,  // id or null to generate a new one
				true,         // single object?
				false,        // summary?
				hasFlags,     // flagged?
				false,        // is a parameter?
                                as.getType(), // type
				as,           // allocation site
				null,         // reachability set
				as + "\\n" + as.getType() + "\\n" + i + " shadow",
				"");
      }
    }

    return hrnShadowSummary;
  }


  // Fold heap region hrn into its site's summary node hrnSummary:
  // every in- and out-edge of hrn is copied onto hrnSummary (unioning
  // beta with any matching existing edge on the summary), and hrn's
  // alpha is unioned into hrnSummary's alpha.  hrn itself is not
  // cleared here; the caller handles that (see age()).
  protected void mergeIntoSummary(HeapRegionNode hrn, HeapRegionNode hrnSummary) {
    assert hrnSummary.isNewSummary();

    // transfer references _from_ hrn over to hrnSummary
    Iterator itrReferencee = hrn.iteratorToReferencees();
    while( itrReferencee.hasNext() ) {
      RefEdge edge = itrReferencee.next();
      RefEdge edgeMerged = edge.copy();
      edgeMerged.setSrc(hrnSummary);

      HeapRegionNode hrnReferencee = edge.getDst();
      RefEdge edgeSummary = hrnSummary.getReferenceTo(hrnReferencee,
						      edge.getType(),
						      edge.getField() );

      if( edgeSummary == null ) {
	// the merge is trivial, nothing to be done
      } else {
	// otherwise an edge from the referencer to hrnSummary exists already
	// and the edge referencer->hrn should be merged with it
	edgeMerged.setBeta(edgeMerged.getBeta().union(edgeSummary.getBeta() ) );
      }

      addRefEdge(hrnSummary, hrnReferencee, edgeMerged);
    }

    // next transfer references _to_ hrn over to hrnSummary
    Iterator itrReferencer = hrn.iteratorToReferencers();
    while( itrReferencer.hasNext() ) {
      RefEdge edge = itrReferencer.next();
      RefEdge edgeMerged = edge.copy();
      edgeMerged.setDst(hrnSummary);

      RefSrcNode onReferencer = edge.getSrc();
      RefEdge edgeSummary = onReferencer.getReferenceTo(hrnSummary,
							edge.getType(),
							edge.getField() );

      if( edgeSummary == null ) {
	// the merge is trivial, nothing to be done
      } else {
	// otherwise an edge from the referencer to alpha_S exists already
	// and the edge referencer->alpha_K should be merged with it
	edgeMerged.setBeta(edgeMerged.getBeta().union(edgeSummary.getBeta() ) );
      }

      addRefEdge(onReferencer, hrnSummary, edgeMerged);
    }

    // then merge hrn reachability into hrnSummary
    hrnSummary.setAlpha(hrnSummary.getAlpha().union(hrn.getAlpha() ) );
  }


  protected void
transferOnto(HeapRegionNode hrnA, HeapRegionNode hrnB) { + + // clear references in and out of node b + clearRefEdgesFrom(hrnB, null, null, true); + clearRefEdgesTo(hrnB, null, null, true); + + // copy each edge in and out of A to B + Iterator itrReferencee = hrnA.iteratorToReferencees(); + while( itrReferencee.hasNext() ) { + RefEdge edge = itrReferencee.next(); + HeapRegionNode hrnReferencee = edge.getDst(); + RefEdge edgeNew = edge.copy(); + edgeNew.setSrc(hrnB); + + addRefEdge(hrnB, hrnReferencee, edgeNew); + } + + Iterator itrReferencer = hrnA.iteratorToReferencers(); + while( itrReferencer.hasNext() ) { + RefEdge edge = itrReferencer.next(); + RefSrcNode onReferencer = edge.getSrc(); + RefEdge edgeNew = edge.copy(); + edgeNew.setDst(hrnB); + + addRefEdge(onReferencer, hrnB, edgeNew); + } + + // replace hrnB reachability with hrnA's + hrnB.setAlpha(hrnA.getAlpha() ); + } + + + protected void ageTokens(AllocSite as, RefEdge edge) { + edge.setBeta(edge.getBeta().ageTokens(as) ); + } + + protected void ageTokens(AllocSite as, HeapRegionNode hrn) { + hrn.setAlpha(hrn.getAlpha().ageTokens(as) ); + } + + + + protected void propagateTokensOverNodes(HeapRegionNode nPrime, + ChangeSet c0, + HashSet nodesWithNewAlpha, + HashSet edgesWithNewBeta) { + + HashSet todoNodes + = new HashSet(); + todoNodes.add(nPrime); + + HashSet todoEdges + = new HashSet(); + + Hashtable nodePlannedChanges + = new Hashtable(); + nodePlannedChanges.put(nPrime, c0); + + Hashtable edgePlannedChanges + = new Hashtable(); + + // first propagate change sets everywhere they can go + while( !todoNodes.isEmpty() ) { + HeapRegionNode n = todoNodes.iterator().next(); + ChangeSet C = nodePlannedChanges.get(n); + + Iterator referItr = n.iteratorToReferencers(); + while( referItr.hasNext() ) { + RefEdge edge = referItr.next(); + todoEdges.add(edge); + + if( !edgePlannedChanges.containsKey(edge) ) { + edgePlannedChanges.put(edge, new ChangeSet().makeCanonical() ); + } + + edgePlannedChanges.put(edge, 
edgePlannedChanges.get(edge).union(C) ); + } + + Iterator refeeItr = n.iteratorToReferencees(); + while( refeeItr.hasNext() ) { + RefEdge edgeF = refeeItr.next(); + HeapRegionNode m = edgeF.getDst(); + + ChangeSet changesToPass = new ChangeSet().makeCanonical(); + + Iterator itrCprime = C.iterator(); + while( itrCprime.hasNext() ) { + ChangeTuple c = itrCprime.next(); + if( edgeF.getBeta().contains( c.getSetToMatch() ) ) { + changesToPass = changesToPass.union(c); + } + } + + if( !changesToPass.isEmpty() ) { + if( !nodePlannedChanges.containsKey(m) ) { + nodePlannedChanges.put(m, new ChangeSet().makeCanonical() ); + } + + ChangeSet currentChanges = nodePlannedChanges.get(m); + + if( !changesToPass.isSubset(currentChanges) ) { + + nodePlannedChanges.put(m, currentChanges.union(changesToPass) ); + todoNodes.add(m); + } + } + } + + todoNodes.remove(n); + } + + // then apply all of the changes for each node at once + Iterator itrMap = nodePlannedChanges.entrySet().iterator(); + while( itrMap.hasNext() ) { + Map.Entry me = (Map.Entry) itrMap.next(); + HeapRegionNode n = (HeapRegionNode) me.getKey(); + ChangeSet C = (ChangeSet) me.getValue(); + + // this propagation step is with respect to one change, + // so we capture the full change from the old alpha: + ReachSet localDelta = n.getAlpha().applyChangeSet( C, true ); + + // but this propagation may be only one of many concurrent + // possible changes, so keep a running union with the node's + // partially updated new alpha set + n.setAlphaNew( n.getAlphaNew().union( localDelta ) ); + + nodesWithNewAlpha.add( n ); + } + + propagateTokensOverEdges(todoEdges, edgePlannedChanges, edgesWithNewBeta); + } + + + protected void propagateTokensOverEdges( + HashSet todoEdges, + Hashtable edgePlannedChanges, + HashSet edgesWithNewBeta) { + + // first propagate all change tuples everywhere they can go + while( !todoEdges.isEmpty() ) { + RefEdge edgeE = todoEdges.iterator().next(); + todoEdges.remove(edgeE); + + if( 
!edgePlannedChanges.containsKey(edgeE) ) { + edgePlannedChanges.put(edgeE, new ChangeSet().makeCanonical() ); + } + + ChangeSet C = edgePlannedChanges.get(edgeE); + + ChangeSet changesToPass = new ChangeSet().makeCanonical(); + + Iterator itrC = C.iterator(); + while( itrC.hasNext() ) { + ChangeTuple c = itrC.next(); + if( edgeE.getBeta().contains( c.getSetToMatch() ) ) { + changesToPass = changesToPass.union(c); + } + } + + RefSrcNode onSrc = edgeE.getSrc(); + + if( !changesToPass.isEmpty() && onSrc instanceof HeapRegionNode ) { + HeapRegionNode n = (HeapRegionNode) onSrc; + + Iterator referItr = n.iteratorToReferencers(); + while( referItr.hasNext() ) { + RefEdge edgeF = referItr.next(); + + if( !edgePlannedChanges.containsKey(edgeF) ) { + edgePlannedChanges.put(edgeF, new ChangeSet().makeCanonical() ); + } + + ChangeSet currentChanges = edgePlannedChanges.get(edgeF); + + if( !changesToPass.isSubset(currentChanges) ) { + todoEdges.add(edgeF); + edgePlannedChanges.put(edgeF, currentChanges.union(changesToPass) ); + } + } + } + } + + // then apply all of the changes for each edge at once + Iterator itrMap = edgePlannedChanges.entrySet().iterator(); + while( itrMap.hasNext() ) { + Map.Entry me = (Map.Entry) itrMap.next(); + RefEdge e = (RefEdge) me.getKey(); + ChangeSet C = (ChangeSet) me.getValue(); + + // this propagation step is with respect to one change, + // so we capture the full change from the old beta: + ReachSet localDelta = e.getBeta().applyChangeSet( C, true ); + + // but this propagation may be only one of many concurrent + // possible changes, so keep a running union with the edge's + // partially updated new beta set + e.setBetaNew( e.getBetaNew().union( localDelta ) ); + + edgesWithNewBeta.add( e ); + } + } + + + public Set calculateAliasedParamSet( FlatCall fc, + boolean isStatic, + FlatMethod fm ) { + + Hashtable paramIndex2ln = + new Hashtable(); + + Hashtable > paramIndex2reachableCallerNodes = + new Hashtable >(); + + for( int i = 0; i < 
fm.numParameters(); ++i ) { + Integer paramIndex = new Integer( i ); + TempDescriptor tdParam = fm.getParameter( i ); + TypeDescriptor typeParam = tdParam.getType(); + + if( typeParam.isImmutable() && !typeParam.isArray() ) { + // don't bother with this primitive parameter, it + // cannot affect reachability + continue; + } + + // now depending on whether the callee is static or not + // we need to account for a "this" argument in order to + // find the matching argument in the caller context + TempDescriptor argTemp_i = fc.getArgMatchingParamIndex( fm, paramIndex ); + + VariableNode argLabel_i = getVariableNodeFromTemp(argTemp_i); + paramIndex2ln.put(paramIndex, argLabel_i); + } + + Iterator lnArgItr = paramIndex2ln.entrySet().iterator(); + while( lnArgItr.hasNext() ) { + Map.Entry me = (Map.Entry)lnArgItr.next(); + Integer index = (Integer) me.getKey(); + VariableNode lnArg_i = (VariableNode) me.getValue(); + + HashSet reachableNodes = new HashSet(); + HashSet todoNodes = new HashSet(); + + // to find all reachable nodes, start with label referencees + Iterator edgeArgItr = lnArg_i.iteratorToReferencees(); + while( edgeArgItr.hasNext() ) { + RefEdge edge = edgeArgItr.next(); + todoNodes.add( edge.getDst() ); + } + + // then follow links until all reachable nodes have been found + while( !todoNodes.isEmpty() ) { + HeapRegionNode hrn = todoNodes.iterator().next(); + todoNodes.remove(hrn); + reachableNodes.add(hrn); + + Iterator edgeItr = hrn.iteratorToReferencees(); + while( edgeItr.hasNext() ) { + RefEdge edge = edgeItr.next(); + + if( !reachableNodes.contains(edge.getDst() ) ) { + todoNodes.add(edge.getDst() ); + } + } + } + + // save for later + paramIndex2reachableCallerNodes.put(index, reachableNodes); + } + + Set aliasedIndices = new HashSet(); + + // check for arguments that are aliased + for( int i = 0; i < fm.numParameters(); ++i ) { + for( int j = 0; j < i; ++j ) { + HashSet s1 = paramIndex2reachableCallerNodes.get( i ); + HashSet s2 = 
paramIndex2reachableCallerNodes.get( j ); + + // some parameters are immutable or primitive, so skip em + if( s1 == null || s2 == null ) { + continue; + } + + Set intersection = new HashSet(s1); + intersection.retainAll(s2); + + if( !intersection.isEmpty() ) { + aliasedIndices.add( new Integer( i ) ); + aliasedIndices.add( new Integer( j ) ); + } + } + } + + return aliasedIndices; + } + + + private String makeMapKey( Integer i, Integer j, String field ) { + return i+","+j+","+field; + } + + private String makeMapKey( Integer i, String field ) { + return i+","+field; + } + + // these hashtables are used during the mapping procedure to say that + // with respect to some argument i there is an edge placed into some + // category for mapping with respect to another argument index j + // so the key into the hashtable is i, the value is a two-element vector + // that contains in 0 the edge and in 1 the Integer index j + private void ensureEmptyEdgeIndexPair( Hashtable< Integer, Set > edge_index_pairs, + Integer indexI ) { + + Set ei = edge_index_pairs.get( indexI ); + if( ei == null ) { + ei = new HashSet(); + } + edge_index_pairs.put( indexI, ei ); + } + + private void addEdgeIndexPair( Hashtable< Integer, Set > edge_index_pairs, + Integer indexI, + RefEdge edge, + Integer indexJ ) { + + Vector v = new Vector(); v.setSize( 2 ); + v.set( 0 , edge ); + v.set( 1 , indexJ ); + Set ei = edge_index_pairs.get( indexI ); + if( ei == null ) { + ei = new HashSet(); + } + ei.add( v ); + edge_index_pairs.put( indexI, ei ); + } + + private ReachSet funcScriptR( ReachSet rsIn, + ReachGraph ogCallee, + MethodContext mc ) { + + ReachSet rsOut = new ReachSet( rsIn ); + + Iterator itr = ogCallee.paramIndex2paramTokenPrimary.entrySet().iterator(); + while( itr.hasNext() ) { + Map.Entry me = (Map.Entry) itr.next(); + Integer i = (Integer) me.getKey(); + ReachTuple p_i = (ReachTuple) me.getValue(); + ReachTuple s_i = ogCallee.paramIndex2paramTokenSecondary.get( i ); + + // skip this if 
there is no secondary token or the parameter + // is part of the aliasing context + if( s_i == null || mc.getAliasedParamIndices().contains( i ) ) { + continue; + } + + rsOut = rsOut.removeTokenAIfTokenB( p_i, s_i ); + } + + return rsOut; + } + + // detects strong updates to the primary parameter object and + // effects the removal of old edges in the calling graph + private void effectCalleeStrongUpdates( Integer paramIndex, + ReachGraph ogCallee, + HeapRegionNode hrnCaller + ) { + Integer idPrimary = ogCallee.paramIndex2idPrimary.get( paramIndex ); + assert idPrimary != null; + + HeapRegionNode hrnPrimary = ogCallee.id2hrn.get( idPrimary ); + assert hrnPrimary != null; + + TypeDescriptor typeParam = hrnPrimary.getType(); + assert typeParam.isClass(); + + Set fieldNamesToRemove = new HashSet(); + + ClassDescriptor cd = typeParam.getClassDesc(); + while( cd != null ) { + + Iterator fieldItr = cd.getFields(); + while( fieldItr.hasNext() ) { + + FieldDescriptor fd = (FieldDescriptor) fieldItr.next(); + TypeDescriptor typeField = fd.getType(); + assert typeField != null; + + if( ogCallee.hasFieldBeenUpdated( hrnPrimary, fd.getSymbol() ) ) { + clearRefEdgesFrom( hrnCaller, fd.getType(), fd.getSymbol(), false ); + } + } + + cd = cd.getSuperDesc(); + } + } + + private boolean hasFieldBeenUpdated( HeapRegionNode hrnPrimary, String field ) { + + Iterator itr = hrnPrimary.iteratorToReferencees(); + while( itr.hasNext() ) { + RefEdge e = itr.next(); + if( e.fieldEquals( field ) && e.isInitialParam() ) { + return false; + } + } + + return true; + } + + // resolveMethodCall() is used to incorporate a callee graph's effects into + // *this* graph, which is the caller. This method can also be used, after + // the entire analysis is complete, to perform parameter decomposition for + // a given call chain. 
+ public void resolveMethodCall(FlatCall fc, // call site in caller method + boolean isStatic, // whether it is a static method + FlatMethod fm, // the callee method (when virtual, can be many) + ReachGraph ogCallee, // the callee's current ownership graph + MethodContext mc, // the aliasing context for this call + ParameterDecomposition pd // if this is not null, we're calling after analysis + ) { + + if( debugCallMap && + mc.getDescriptor().getSymbol().equals( debugCaller ) && + fm.getMethod().getSymbol().equals( debugCallee ) + ) { + + try { + writeGraph("debug1BeforeCall", + true, // write labels (variables) + true, // selectively hide intermediate temp vars + true, // prune unreachable heap regions + false, // show back edges to confirm graph validity + false, // show parameter indices (unmaintained!) + true, // hide subset reachability states + true); // hide edge taints + + ogCallee.writeGraph("debug0Callee", + true, // write labels (variables) + true, // selectively hide intermediate temp vars + true, // prune unreachable heap regions + false, // show back edges to confirm graph validity + false, // show parameter indices (unmaintained!) 
+ true, // hide subset reachability states + true); // hide edge taints + } catch( IOException e ) {} + + System.out.println( " "+mc+" is calling "+fm ); + } + + + + // define rewrite rules and other structures to organize data by parameter/argument index + Hashtable paramIndex2rewriteH_p = new Hashtable(); + Hashtable paramIndex2rewriteH_s = new Hashtable(); + + Hashtable paramIndex2rewriteJ_p2p = new Hashtable(); // select( i, j, f ) + Hashtable paramIndex2rewriteJ_p2s = new Hashtable(); // select( i, f ) + Hashtable paramIndex2rewriteJ_s2p = new Hashtable(); + Hashtable paramIndex2rewriteJ_s2s = new Hashtable(); + + Hashtable paramIndex2rewriteK_p = new Hashtable(); + Hashtable paramIndex2rewriteK_p2 = new Hashtable(); + Hashtable paramIndex2rewriteK_s = new Hashtable(); + + Hashtable paramIndex2rewrite_d_p = new Hashtable(); + Hashtable paramIndex2rewrite_d_s = new Hashtable(); + + Hashtable paramIndex2rewriteD = new Hashtable(); + + + Hashtable paramIndex2ln = new Hashtable(); + + + paramIndex2rewriteH_p.put( bogusIndex, rsIdentity ); + paramIndex2rewriteH_s.put( bogusIndex, rsIdentity ); + + paramIndex2rewriteJ_p2p.put( bogusIndex.toString(), rsIdentity ); + paramIndex2rewriteJ_p2s.put( bogusIndex.toString(), rsIdentity ); + paramIndex2rewriteJ_s2p.put( bogusIndex, rsIdentity ); + paramIndex2rewriteJ_s2s.put( bogusIndex, rsIdentity ); + + + for( int i = 0; i < fm.numParameters(); ++i ) { + Integer paramIndex = new Integer(i); + + if( !ogCallee.paramIndex2idPrimary.containsKey( paramIndex ) ) { + // skip this immutable parameter + continue; + } + + // setup H (primary) + Integer idPrimary = ogCallee.paramIndex2idPrimary.get( paramIndex ); + assert ogCallee.id2hrn.containsKey( idPrimary ); + HeapRegionNode hrnPrimary = ogCallee.id2hrn.get( idPrimary ); + assert hrnPrimary != null; + paramIndex2rewriteH_p.put( paramIndex, toShadowTokens( ogCallee, hrnPrimary.getAlpha() ) ); + + // setup J (primary->X) + Iterator p2xItr = hrnPrimary.iteratorToReferencees(); + 
while( p2xItr.hasNext() ) { + RefEdge p2xEdge = p2xItr.next(); + + // we only care about initial parameter edges here + if( !p2xEdge.isInitialParam() ) { continue; } + + HeapRegionNode hrnDst = p2xEdge.getDst(); + + if( ogCallee.idPrimary2paramIndexSet.containsKey( hrnDst.getID() ) ) { + Iterator jItr = ogCallee.idPrimary2paramIndexSet.get( hrnDst.getID() ).iterator(); + while( jItr.hasNext() ) { + Integer j = jItr.next(); + paramIndex2rewriteJ_p2p.put( makeMapKey( i, j, p2xEdge.getField() ), + toShadowTokens( ogCallee, p2xEdge.getBeta() ) ); + } + + } else { + assert ogCallee.idSecondary2paramIndexSet.containsKey( hrnDst.getID() ); + paramIndex2rewriteJ_p2s.put( makeMapKey( i, p2xEdge.getField() ), + toShadowTokens( ogCallee, p2xEdge.getBeta() ) ); + } + } + + // setup K (primary) + TempDescriptor tdParamQ = ogCallee.paramIndex2tdQ.get( paramIndex ); + assert tdParamQ != null; + VariableNode lnParamQ = ogCallee.td2vn.get( tdParamQ ); + assert lnParamQ != null; + RefEdge edgeSpecialQ_i = lnParamQ.getReferenceTo( hrnPrimary, null, null ); + assert edgeSpecialQ_i != null; + ReachSet qBeta = toShadowTokens( ogCallee, edgeSpecialQ_i.getBeta() ); + + ReachTuple p_i = ogCallee.paramIndex2paramTokenPrimary .get( paramIndex ); + ReachTuple s_i = ogCallee.paramIndex2paramTokenSecondary.get( paramIndex ); + + ReachSet K_p = new ReachSet().makeCanonical(); + ReachSet K_p2 = new ReachSet().makeCanonical(); + if( s_i == null ) { + K_p = qBeta; + } else { + // sort qBeta into K_p1 and K_p2 + Iterator ttsItr = qBeta.iterator(); + while( ttsItr.hasNext() ) { + ReachState tts = ttsItr.next(); + if( s_i != null && tts.containsBoth( p_i, s_i ) ) { + K_p2 = K_p2.union( tts ); + } else { + K_p = K_p.union( tts ); + } + } + } + paramIndex2rewriteK_p .put( paramIndex, K_p ); + paramIndex2rewriteK_p2.put( paramIndex, K_p2 ); + + + // if there is a secondary node, compute the rest of the rewrite rules + if( ogCallee.paramIndex2idSecondary.containsKey( paramIndex ) ) { + + // setup H 
(secondary) + Integer idSecondary = ogCallee.paramIndex2idSecondary.get( paramIndex ); + assert ogCallee.id2hrn.containsKey( idSecondary ); + HeapRegionNode hrnSecondary = ogCallee.id2hrn.get( idSecondary ); + assert hrnSecondary != null; + paramIndex2rewriteH_s.put( paramIndex, toShadowTokens( ogCallee, hrnSecondary.getAlpha() ) ); + + // setup J (secondary->X) + Iterator s2xItr = hrnSecondary.iteratorToReferencees(); + while( s2xItr.hasNext() ) { + RefEdge s2xEdge = s2xItr.next(); + + if( !s2xEdge.isInitialParam() ) { continue; } + + HeapRegionNode hrnDst = s2xEdge.getDst(); + + if( ogCallee.idPrimary2paramIndexSet.containsKey( hrnDst.getID() ) ) { + Iterator jItr = ogCallee.idPrimary2paramIndexSet.get( hrnDst.getID() ).iterator(); + while( jItr.hasNext() ) { + Integer j = jItr.next(); + paramIndex2rewriteJ_s2p.put( i, toShadowTokens( ogCallee, s2xEdge.getBeta() ) ); + } + + } else { + assert ogCallee.idSecondary2paramIndexSet.containsKey( hrnDst.getID() ); + paramIndex2rewriteJ_s2s.put( i, toShadowTokens( ogCallee, s2xEdge.getBeta() ) ); + } + } + + // setup K (secondary) + TempDescriptor tdParamR = ogCallee.paramIndex2tdR.get( paramIndex ); + assert tdParamR != null; + VariableNode lnParamR = ogCallee.td2vn.get( tdParamR ); + assert lnParamR != null; + RefEdge edgeSpecialR_i = lnParamR.getReferenceTo( hrnSecondary, null, null ); + assert edgeSpecialR_i != null; + paramIndex2rewriteK_s.put( paramIndex, + toShadowTokens( ogCallee, edgeSpecialR_i.getBeta() ) ); + } + + + // now depending on whether the callee is static or not + // we need to account for a "this" argument in order to + // find the matching argument in the caller context + TempDescriptor argTemp_i = fc.getArgMatchingParamIndex( fm, paramIndex ); + + // remember which caller arg label maps to param index + VariableNode argLabel_i = getVariableNodeFromTemp( argTemp_i ); + paramIndex2ln.put( paramIndex, argLabel_i ); + + // do a callee-effect strong update pre-pass here + if( 
argTemp_i.getType().isClass() ) { + + Iterator edgeItr = argLabel_i.iteratorToReferencees(); + while( edgeItr.hasNext() ) { + RefEdge edge = edgeItr.next(); + HeapRegionNode hrn = edge.getDst(); + + if( (hrn.getNumReferencers() == 1) || // case 1 + (hrn.isSingleObject() && argLabel_i.getNumReferencees() == 1) // case 2 + ) { + if( !DISABLE_STRONG_UPDATES ) { + effectCalleeStrongUpdates( paramIndex, ogCallee, hrn ); + } + } + } + } + + // then calculate the d and D rewrite rules + ReachSet d_i_p = new ReachSet().makeCanonical(); + ReachSet d_i_s = new ReachSet().makeCanonical(); + Iterator edgeItr = argLabel_i.iteratorToReferencees(); + while( edgeItr.hasNext() ) { + RefEdge edge = edgeItr.next(); + + d_i_p = d_i_p.union( edge.getBeta().intersection( edge.getDst().getAlpha() ) ); + d_i_s = d_i_s.union( edge.getBeta() ); + } + paramIndex2rewrite_d_p.put( paramIndex, d_i_p ); + paramIndex2rewrite_d_s.put( paramIndex, d_i_s ); + + // TODO: we should only do this when we need it, and then + // memoize it for the rest of the mapping procedure + ReachSet D_i = d_i_s.exhaustiveArityCombinations(); + paramIndex2rewriteD.put( paramIndex, D_i ); + } + + + // with respect to each argument, map parameter effects into caller + HashSet nodesWithNewAlpha = new HashSet(); + HashSet edgesWithNewBeta = new HashSet(); + + Hashtable > pi2dr = + new Hashtable >(); + + Hashtable > pi2r = + new Hashtable >(); + + Set defParamObj = new HashSet(); + + Iterator lnArgItr = paramIndex2ln.entrySet().iterator(); + while( lnArgItr.hasNext() ) { + Map.Entry me = (Map.Entry) lnArgItr.next(); + Integer index = (Integer) me.getKey(); + VariableNode lnArg_i = (VariableNode) me.getValue(); + + Set dr = new HashSet(); + Set r = new HashSet(); + Set todo = new HashSet(); + + // find all reachable nodes starting with label referencees + Iterator edgeArgItr = lnArg_i.iteratorToReferencees(); + while( edgeArgItr.hasNext() ) { + RefEdge edge = edgeArgItr.next(); + HeapRegionNode hrn = edge.getDst(); + + 
dr.add( hrn ); + + if( lnArg_i.getNumReferencees() == 1 && hrn.isSingleObject() ) { + defParamObj.add( hrn ); + } + + Iterator edgeHrnItr = hrn.iteratorToReferencees(); + while( edgeHrnItr.hasNext() ) { + RefEdge edger = edgeHrnItr.next(); + todo.add( edger.getDst() ); + } + + // then follow links until all reachable nodes have been found + while( !todo.isEmpty() ) { + HeapRegionNode hrnr = todo.iterator().next(); + todo.remove( hrnr ); + + r.add( hrnr ); + + Iterator edgeItr = hrnr.iteratorToReferencees(); + while( edgeItr.hasNext() ) { + RefEdge edger = edgeItr.next(); + if( !r.contains( edger.getDst() ) ) { + todo.add( edger.getDst() ); + } + } + } + + if( hrn.isSingleObject() ) { + r.remove( hrn ); + } + } + + pi2dr.put( index, dr ); + pi2r .put( index, r ); + } + + assert defParamObj.size() <= fm.numParameters(); + + // if we're in parameter decomposition mode, report some results here + if( pd != null ) { + Iterator mapItr; + + // report primary parameter object mappings + mapItr = pi2dr.entrySet().iterator(); + while( mapItr.hasNext() ) { + Map.Entry me = (Map.Entry) mapItr.next(); + Integer paramIndex = (Integer) me.getKey(); + Set hrnAset = (Set) me.getValue(); + + Iterator hrnItr = hrnAset.iterator(); + while( hrnItr.hasNext() ) { + HeapRegionNode hrnA = hrnItr.next(); + pd.mapRegionToParamObject( hrnA, paramIndex ); + } + } + + // report parameter-reachable mappings + mapItr = pi2r.entrySet().iterator(); + while( mapItr.hasNext() ) { + Map.Entry me = (Map.Entry) mapItr.next(); + Integer paramIndex = (Integer) me.getKey(); + Set hrnRset = (Set) me.getValue(); + + Iterator hrnItr = hrnRset.iterator(); + while( hrnItr.hasNext() ) { + HeapRegionNode hrnR = hrnItr.next(); + pd.mapRegionToParamReachable( hrnR, paramIndex ); + } + } + + // and we're done in this method for special param decomp mode + return; + } + + + // now iterate over reachable nodes to rewrite their alpha, and + // classify edges found for beta rewrite + Hashtable tokens2states = new 
Hashtable(); + + Hashtable< Integer, Set > edges_p2p = new Hashtable< Integer, Set >(); + Hashtable< Integer, Set > edges_p2s = new Hashtable< Integer, Set >(); + Hashtable< Integer, Set > edges_s2p = new Hashtable< Integer, Set >(); + Hashtable< Integer, Set > edges_s2s = new Hashtable< Integer, Set >(); + Hashtable< Integer, Set > edges_up_dr = new Hashtable< Integer, Set >(); + Hashtable< Integer, Set > edges_up_r = new Hashtable< Integer, Set >(); + + // so again, with respect to some arg i... + lnArgItr = paramIndex2ln.entrySet().iterator(); + while( lnArgItr.hasNext() ) { + Map.Entry me = (Map.Entry) lnArgItr.next(); + Integer index = (Integer) me.getKey(); + VariableNode lnArg_i = (VariableNode) me.getValue(); + + ReachTuple p_i = ogCallee.paramIndex2paramTokenPrimary.get( index ); + ReachTuple s_i = ogCallee.paramIndex2paramTokenSecondary.get( index ); + assert p_i != null; + + ensureEmptyEdgeIndexPair( edges_p2p, index ); + ensureEmptyEdgeIndexPair( edges_p2s, index ); + ensureEmptyEdgeIndexPair( edges_s2p, index ); + ensureEmptyEdgeIndexPair( edges_s2s, index ); + ensureEmptyEdgeIndexPair( edges_up_dr, index ); + ensureEmptyEdgeIndexPair( edges_up_r, index ); + + Set dr = pi2dr.get( index ); + Iterator hrnItr = dr.iterator(); + while( hrnItr.hasNext() ) { + // this heap region is definitely an "a_i" or primary by virtue of being in dr + HeapRegionNode hrn = hrnItr.next(); + + tokens2states.clear(); + tokens2states.put( p_i, hrn.getAlpha() ); + + rewriteCallerReachability( index, + hrn, + null, + paramIndex2rewriteH_p.get( index ), + tokens2states, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + false, + null ); + + nodesWithNewAlpha.add( hrn ); + + // sort edges + Iterator edgeItr = hrn.iteratorToReferencers(); + while( edgeItr.hasNext() ) { + RefEdge edge = edgeItr.next(); + RefSrcNode on = edge.getSrc(); + + boolean edge_classified = false; + + + if( on instanceof HeapRegionNode ) { + // hrn0 may be "a_j" and/or 
"r_j" or even neither + HeapRegionNode hrn0 = (HeapRegionNode) on; + + Iterator itr = pi2dr.entrySet().iterator(); + while( itr.hasNext() ) { + Map.Entry mo = (Map.Entry) itr.next(); + Integer pi = (Integer) mo.getKey(); + Set dr_i = (Set) mo.getValue(); + + if( dr_i.contains( hrn0 ) ) { + addEdgeIndexPair( edges_p2p, pi, edge, index ); + edge_classified = true; + } + } + + itr = pi2r.entrySet().iterator(); + while( itr.hasNext() ) { + Map.Entry mo = (Map.Entry) itr.next(); + Integer pi = (Integer) mo.getKey(); + Set r_i = (Set) mo.getValue(); + + if( r_i.contains( hrn0 ) ) { + addEdgeIndexPair( edges_s2p, pi, edge, index ); + edge_classified = true; + } + } + } + + // all of these edges are upstream of directly reachable objects + if( !edge_classified ) { + addEdgeIndexPair( edges_up_dr, index, edge, index ); + } + } + } + + + Set r = pi2r.get( index ); + hrnItr = r.iterator(); + while( hrnItr.hasNext() ) { + // this heap region is definitely an "r_i" or secondary by virtue of being in r + HeapRegionNode hrn = hrnItr.next(); + + if( paramIndex2rewriteH_s.containsKey( index ) ) { + + tokens2states.clear(); + tokens2states.put( p_i, new ReachSet().makeCanonical() ); + tokens2states.put( s_i, hrn.getAlpha() ); + + rewriteCallerReachability( index, + hrn, + null, + paramIndex2rewriteH_s.get( index ), + tokens2states, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + false, + null ); + + nodesWithNewAlpha.add( hrn ); + } + + // sort edges + Iterator edgeItr = hrn.iteratorToReferencers(); + while( edgeItr.hasNext() ) { + RefEdge edge = edgeItr.next(); + RefSrcNode on = edge.getSrc(); + + boolean edge_classified = false; + + if( on instanceof HeapRegionNode ) { + // hrn0 may be "a_j" and/or "r_j" or even neither + HeapRegionNode hrn0 = (HeapRegionNode) on; + + Iterator itr = pi2dr.entrySet().iterator(); + while( itr.hasNext() ) { + Map.Entry mo = (Map.Entry) itr.next(); + Integer pi = (Integer) mo.getKey(); + Set dr_i = (Set) 
mo.getValue(); + + if( dr_i.contains( hrn0 ) ) { + addEdgeIndexPair( edges_p2s, pi, edge, index ); + edge_classified = true; + } + } + + itr = pi2r.entrySet().iterator(); + while( itr.hasNext() ) { + Map.Entry mo = (Map.Entry) itr.next(); + Integer pi = (Integer) mo.getKey(); + Set r_i = (Set) mo.getValue(); + + if( r_i.contains( hrn0 ) ) { + addEdgeIndexPair( edges_s2s, pi, edge, index ); + edge_classified = true; + } + } + } + + // these edges are all upstream of some reachable node + if( !edge_classified ) { + addEdgeIndexPair( edges_up_r, index, edge, index ); + } + } + } + } + + + // and again, with respect to some arg i... + lnArgItr = paramIndex2ln.entrySet().iterator(); + while( lnArgItr.hasNext() ) { + Map.Entry me = (Map.Entry) lnArgItr.next(); + Integer index = (Integer) me.getKey(); + VariableNode lnArg_i = (VariableNode) me.getValue(); + + + // update reachable edges + Iterator edgeItr = edges_p2p.get( index ).iterator(); + while( edgeItr.hasNext() ) { + Vector mo = (Vector) edgeItr.next(); + RefEdge edge = (RefEdge) mo.get( 0 ); + Integer indexJ = (Integer) mo.get( 1 ); + + if( !paramIndex2rewriteJ_p2p.containsKey( makeMapKey( index, + indexJ, + edge.getField() ) ) ) { + continue; + } + + ReachTuple p_j = ogCallee.paramIndex2paramTokenPrimary.get( indexJ ); + assert p_j != null; + + tokens2states.clear(); + tokens2states.put( p_j, edge.getBeta() ); + + rewriteCallerReachability( index, + null, + edge, + paramIndex2rewriteJ_p2p.get( makeMapKey( index, + indexJ, + edge.getField() ) ), + tokens2states, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + false, + null ); + + edgesWithNewBeta.add( edge ); + } + + + edgeItr = edges_p2s.get( index ).iterator(); + while( edgeItr.hasNext() ) { + Vector mo = (Vector) edgeItr.next(); + RefEdge edge = (RefEdge) mo.get( 0 ); + Integer indexJ = (Integer) mo.get( 1 ); + + if( !paramIndex2rewriteJ_p2s.containsKey( makeMapKey( index, + edge.getField() ) ) ) { + continue; + } + + 
ReachTuple s_j = ogCallee.paramIndex2paramTokenSecondary.get( indexJ ); + assert s_j != null; + + tokens2states.clear(); + tokens2states.put( s_j, edge.getBeta() ); + + rewriteCallerReachability( index, + null, + edge, + paramIndex2rewriteJ_p2s.get( makeMapKey( index, + edge.getField() ) ), + tokens2states, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + false, + null ); + + edgesWithNewBeta.add( edge ); + } + + + edgeItr = edges_s2p.get( index ).iterator(); + while( edgeItr.hasNext() ) { + Vector mo = (Vector) edgeItr.next(); + RefEdge edge = (RefEdge) mo.get( 0 ); + Integer indexJ = (Integer) mo.get( 1 ); + + if( !paramIndex2rewriteJ_s2p.containsKey( index ) ) { + continue; + } + + ReachTuple p_j = ogCallee.paramIndex2paramTokenPrimary.get( indexJ ); + assert p_j != null; + + tokens2states.clear(); + tokens2states.put( p_j, edge.getBeta() ); + + rewriteCallerReachability( index, + null, + edge, + paramIndex2rewriteJ_s2p.get( index ), + tokens2states, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + false, + null ); + + edgesWithNewBeta.add( edge ); + } + + + edgeItr = edges_s2s.get( index ).iterator(); + while( edgeItr.hasNext() ) { + Vector mo = (Vector) edgeItr.next(); + RefEdge edge = (RefEdge) mo.get( 0 ); + Integer indexJ = (Integer) mo.get( 1 ); + + if( !paramIndex2rewriteJ_s2s.containsKey( index ) ) { + continue; + } + + ReachTuple s_j = ogCallee.paramIndex2paramTokenSecondary.get( indexJ ); + assert s_j != null; + + tokens2states.clear(); + tokens2states.put( s_j, edge.getBeta() ); + + rewriteCallerReachability( index, + null, + edge, + paramIndex2rewriteJ_s2s.get( index ), + tokens2states, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + false, + null ); + + edgesWithNewBeta.add( edge ); + } + + + // update directly upstream edges + Hashtable edgeUpstreamPlannedChanges = + new Hashtable(); + + HashSet edgesDirectlyUpstream = + new 
HashSet(); + + edgeItr = edges_up_dr.get( index ).iterator(); + while( edgeItr.hasNext() ) { + Vector mo = (Vector) edgeItr.next(); + RefEdge edge = (RefEdge) mo.get( 0 ); + Integer indexJ = (Integer) mo.get( 1 ); + + edgesDirectlyUpstream.add( edge ); + + ReachTuple p_j = ogCallee.paramIndex2paramTokenPrimary.get( indexJ ); + assert p_j != null; + + // start with K_p2 and p_j + tokens2states.clear(); + tokens2states.put( p_j, edge.getBeta() ); + + rewriteCallerReachability( index, + null, + edge, + paramIndex2rewriteK_p2.get( index ), + tokens2states, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + true, + edgeUpstreamPlannedChanges ); + + // and add in s_j, if required, and do K_p + ReachTuple s_j = ogCallee.paramIndex2paramTokenSecondary.get( indexJ ); + if( s_j != null ) { + tokens2states.put( s_j, edge.getBeta() ); + } + + rewriteCallerReachability( index, + null, + edge, + paramIndex2rewriteK_p.get( index ), + tokens2states, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + true, + edgeUpstreamPlannedChanges ); + + edgesWithNewBeta.add( edge ); + } + + propagateTokensOverEdges( edgesDirectlyUpstream, + edgeUpstreamPlannedChanges, + edgesWithNewBeta ); + + + // update upstream edges + edgeUpstreamPlannedChanges = + new Hashtable(); + + HashSet edgesUpstream = + new HashSet(); + + edgeItr = edges_up_r.get( index ).iterator(); + while( edgeItr.hasNext() ) { + Vector mo = (Vector) edgeItr.next(); + RefEdge edge = (RefEdge) mo.get( 0 ); + Integer indexJ = (Integer) mo.get( 1 ); + + if( !paramIndex2rewriteK_s.containsKey( index ) ) { + continue; + } + + edgesUpstream.add( edge ); + + ReachTuple p_j = ogCallee.paramIndex2paramTokenPrimary.get( indexJ ); + assert p_j != null; + + ReachTuple s_j = ogCallee.paramIndex2paramTokenSecondary.get( indexJ ); + assert s_j != null; + + tokens2states.clear(); + tokens2states.put( p_j, rsWttsEmpty ); + tokens2states.put( s_j, edge.getBeta() ); + + 
rewriteCallerReachability( index, + null, + edge, + paramIndex2rewriteK_s.get( index ), + tokens2states, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + true, + edgeUpstreamPlannedChanges ); + + edgesWithNewBeta.add( edge ); + } + + propagateTokensOverEdges( edgesUpstream, + edgeUpstreamPlannedChanges, + edgesWithNewBeta ); + + } // end effects per argument/parameter map + + + // commit changes to alpha and beta + Iterator nodeItr = nodesWithNewAlpha.iterator(); + while( nodeItr.hasNext() ) { + nodeItr.next().applyAlphaNew(); + } + + Iterator edgeItr = edgesWithNewBeta.iterator(); + while( edgeItr.hasNext() ) { + edgeItr.next().applyBetaNew(); + } + + + // verify the existence of allocation sites and their + // shadows from the callee in the context of this caller graph + // then map allocated nodes of callee onto the caller shadows + // of them + Hashtable tokens2statesEmpty = new Hashtable(); + + Iterator asItr = ogCallee.allocSites.iterator(); + while( asItr.hasNext() ) { + AllocSite allocSite = asItr.next(); + + // grab the summary in the caller just to make sure + // the allocation site has nodes in the caller + HeapRegionNode hrnSummary = getSummaryNode( allocSite ); + + // assert that the shadow nodes have no reference edges + // because they're brand new to the graph, or last time + // they were used they should have been cleared of edges + HeapRegionNode hrnShadowSummary = getShadowSummaryNode( allocSite ); + assert hrnShadowSummary.getNumReferencers() == 0; + assert hrnShadowSummary.getNumReferencees() == 0; + + // then bring g_ij onto g'_ij and rewrite + HeapRegionNode hrnSummaryCallee = ogCallee.getSummaryNode( allocSite ); + hrnShadowSummary.setAlpha( toShadowTokens( ogCallee, hrnSummaryCallee.getAlpha() ) ); + + // shadow nodes only are touched by a rewrite one time, + // so rewrite and immediately commit--and they don't belong + // to a particular parameter, so use a bogus param index + // that pulls a 
self-rewrite out of H + rewriteCallerReachability( bogusIndex, + hrnShadowSummary, + null, + funcScriptR( hrnShadowSummary.getAlpha(), ogCallee, mc ), + tokens2statesEmpty, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + false, + null ); + + hrnShadowSummary.applyAlphaNew(); + + + for( int i = 0; i < allocSite.getAllocationDepth(); ++i ) { + Integer idIth = allocSite.getIthOldest(i); + assert id2hrn.containsKey(idIth); + HeapRegionNode hrnIth = id2hrn.get(idIth); + + Integer idShadowIth = -(allocSite.getIthOldest(i)); + assert id2hrn.containsKey(idShadowIth); + HeapRegionNode hrnIthShadow = id2hrn.get(idShadowIth); + assert hrnIthShadow.getNumReferencers() == 0; + assert hrnIthShadow.getNumReferencees() == 0; + + assert ogCallee.id2hrn.containsKey(idIth); + HeapRegionNode hrnIthCallee = ogCallee.id2hrn.get(idIth); + hrnIthShadow.setAlpha(toShadowTokens(ogCallee, hrnIthCallee.getAlpha() ) ); + + rewriteCallerReachability( bogusIndex, + hrnIthShadow, + null, + funcScriptR( hrnIthShadow.getAlpha(), ogCallee, mc ), + tokens2statesEmpty, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + false, + null ); + + hrnIthShadow.applyAlphaNew(); + } + } + + + // for every heap region->heap region edge in the + // callee graph, create the matching edge or edges + // in the caller graph + Set sCallee = ogCallee.id2hrn.entrySet(); + Iterator iCallee = sCallee.iterator(); + + while( iCallee.hasNext() ) { + Map.Entry meCallee = (Map.Entry) iCallee.next(); + Integer idCallee = (Integer) meCallee.getKey(); + HeapRegionNode hrnCallee = (HeapRegionNode) meCallee.getValue(); + + Iterator heapRegionsItrCallee = hrnCallee.iteratorToReferencees(); + while( heapRegionsItrCallee.hasNext() ) { + RefEdge edgeCallee = heapRegionsItrCallee.next(); + HeapRegionNode hrnChildCallee = edgeCallee.getDst(); + Integer idChildCallee = hrnChildCallee.getID(); + + // only address this edge if it is not a special initial edge + 
if( !edgeCallee.isInitialParam() ) { + + // now we know that in the callee method's ownership graph + // there is a heap region->heap region reference edge given + // by heap region pointers: + // hrnCallee -> heapChildCallee + // + // or by the ownership-graph independent ID's: + // idCallee -> idChildCallee + + // make the edge with src and dst so beta info is + // calculated once, then copy it for each new edge in caller + + RefEdge edgeNewInCallerTemplate = new RefEdge( null, + null, + edgeCallee.getType(), + edgeCallee.getField(), + false, + funcScriptR( toShadowTokens( ogCallee, + edgeCallee.getBeta() + ), + ogCallee, + mc ) + ); + + rewriteCallerReachability( bogusIndex, + null, + edgeNewInCallerTemplate, + edgeNewInCallerTemplate.getBeta(), + tokens2statesEmpty, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + false, + null ); + + edgeNewInCallerTemplate.applyBetaNew(); + + + // So now make a set of possible source heaps in the caller graph + // and a set of destination heaps in the caller graph, and make + // a reference edge in the caller for every possible (src,dst) pair + HashSet possibleCallerSrcs = + getHRNSetThatPossiblyMapToCalleeHRN( ogCallee, + (HeapRegionNode) edgeCallee.getSrc(), + pi2dr, + pi2r ); + + HashSet possibleCallerDsts = + getHRNSetThatPossiblyMapToCalleeHRN( ogCallee, + edgeCallee.getDst(), + pi2dr, + pi2r ); + + // make every possible pair of {srcSet} -> {dstSet} edges in the caller + Iterator srcItr = possibleCallerSrcs.iterator(); + while( srcItr.hasNext() ) { + HeapRegionNode src = (HeapRegionNode) srcItr.next(); + + if( !hasMatchingField( src, edgeCallee ) ) { + // prune this source node possibility + continue; + } + + Iterator dstItr = possibleCallerDsts.iterator(); + while( dstItr.hasNext() ) { + HeapRegionNode dst = (HeapRegionNode) dstItr.next(); + + if( !hasMatchingType( edgeCallee, dst ) ) { + // prune + continue; + } + + + + + + // otherwise the caller src and dst pair can match the 
edge, so make it + TypeDescriptor tdNewEdge = + mostSpecificType( edgeCallee.getType(), + hrnChildCallee.getType(), + dst.getType() + ); + + RefEdge edgeNewInCaller = edgeNewInCallerTemplate.copy(); + edgeNewInCaller.setSrc( src ); + edgeNewInCaller.setDst( dst ); + edgeNewInCaller.setType( tdNewEdge ); + + + // handle taint info if callee created this edge + // added by eom + Set pParamSet=idPrimary2paramIndexSet.get(dst.getID()); + Set sParamSet=idSecondary2paramIndexSet.get(dst.getID()); + HashSet paramSet=new HashSet(); + if(pParamSet!=null){ + paramSet.addAll(pParamSet); + } + if(sParamSet!=null){ + paramSet.addAll(sParamSet); + } + Iterator paramIter=paramSet.iterator(); + int newTaintIdentifier=0; + while(paramIter.hasNext()){ + Integer paramIdx=paramIter.next(); + edgeNewInCaller.tainedBy(paramIdx); + } + + RefEdge edgeExisting = src.getReferenceTo( dst, + edgeNewInCaller.getType(), + edgeNewInCaller.getField() ); + if( edgeExisting == null ) { + // if this edge doesn't exist in the caller, create it + addRefEdge( src, dst, edgeNewInCaller ); + + } else { + // if it already exists, merge with it + edgeExisting.setBeta( edgeExisting.getBeta().union( edgeNewInCaller.getBeta() ) ); + } + } + } + } + } + } + + + + // return value may need to be assigned in caller + TempDescriptor returnTemp = fc.getReturnTemp(); + if( returnTemp != null && !returnTemp.getType().isImmutable() ) { + + VariableNode lnLhsCaller = getVariableNodeFromTemp( returnTemp ); + clearRefEdgesFrom( lnLhsCaller, null, null, true ); + + VariableNode lnReturnCallee = ogCallee.getVariableNodeFromTemp( tdReturn ); + Iterator edgeCalleeItr = lnReturnCallee.iteratorToReferencees(); + while( edgeCalleeItr.hasNext() ) { + RefEdge edgeCallee = edgeCalleeItr.next(); + HeapRegionNode hrnChildCallee = edgeCallee.getDst(); + + // some edge types are not possible return values when we can + // see what type variable we are assigning it to + if( !isSuperiorType( returnTemp.getType(), edgeCallee.getType() ) 
) { + System.out.println( "*** NOT EXPECTING TO SEE THIS: Throwing out "+edgeCallee+" for return temp "+returnTemp ); + // prune + continue; + } + + RefEdge edgeNewInCallerTemplate = new RefEdge( null, + null, + edgeCallee.getType(), + edgeCallee.getField(), + false, + funcScriptR( toShadowTokens(ogCallee, + edgeCallee.getBeta() ), + ogCallee, + mc ) + ); + rewriteCallerReachability( bogusIndex, + null, + edgeNewInCallerTemplate, + edgeNewInCallerTemplate.getBeta(), + tokens2statesEmpty, + paramIndex2rewrite_d_p, + paramIndex2rewrite_d_s, + paramIndex2rewriteD, + ogCallee, + false, + null ); + + edgeNewInCallerTemplate.applyBetaNew(); + + + HashSet assignCallerRhs = + getHRNSetThatPossiblyMapToCalleeHRN( ogCallee, + edgeCallee.getDst(), + pi2dr, + pi2r ); + + Iterator itrHrn = assignCallerRhs.iterator(); + while( itrHrn.hasNext() ) { + HeapRegionNode hrnCaller = itrHrn.next(); + + // don't make edge in caller if it is disallowed by types + if( !isSuperiorType( returnTemp.getType(), hrnCaller.getType() ) ) { + // prune + continue; + } + + if( !isSuperiorType( returnTemp.getType(), hrnChildCallee.getType() ) ) { + // prune + continue; + } + + if( !isSuperiorType( edgeCallee.getType(), hrnCaller.getType() ) ) { + // prune + continue; + } + + TypeDescriptor tdNewEdge = + mostSpecificType( edgeCallee.getType(), + hrnChildCallee.getType(), + hrnCaller.getType() + ); + + // otherwise caller node can match callee edge, so make it + RefEdge edgeNewInCaller = edgeNewInCallerTemplate.copy(); + edgeNewInCaller.setSrc( lnLhsCaller ); + edgeNewInCaller.setDst( hrnCaller ); + edgeNewInCaller.setType( tdNewEdge ); + + RefEdge edgeExisting = lnLhsCaller.getReferenceTo( hrnCaller, + tdNewEdge, + edgeNewInCaller.getField() ); + if( edgeExisting == null ) { + + // if this edge doesn't exist in the caller, create it + addRefEdge( lnLhsCaller, hrnCaller, edgeNewInCaller ); + } else { + // if it already exists, merge with it + edgeExisting.setBeta( edgeExisting.getBeta().union( 
edgeNewInCaller.getBeta() ) ); + } + } + } + } + + + + // merge the shadow nodes of allocation sites back down to normal capacity + Iterator allocItr = ogCallee.allocSites.iterator(); + while( allocItr.hasNext() ) { + AllocSite as = allocItr.next(); + + // first age each allocation site enough times to make room for the shadow nodes + for( int i = 0; i < as.getAllocationDepth(); ++i ) { + age( as ); + } + + // then merge the shadow summary into the normal summary + HeapRegionNode hrnSummary = getSummaryNode( as ); + assert hrnSummary != null; + + HeapRegionNode hrnSummaryShadow = getShadowSummaryNode( as ); + assert hrnSummaryShadow != null; + + mergeIntoSummary( hrnSummaryShadow, hrnSummary ); + + // then clear off after merge + clearRefEdgesFrom( hrnSummaryShadow, null, null, true ); + clearRefEdgesTo ( hrnSummaryShadow, null, null, true ); + hrnSummaryShadow.setAlpha( new ReachSet().makeCanonical() ); + + // then transplant shadow nodes onto the now clean normal nodes + for( int i = 0; i < as.getAllocationDepth(); ++i ) { + + Integer idIth = as.getIthOldest( i ); + HeapRegionNode hrnIth = id2hrn.get( idIth ); + Integer idIthShadow = as.getIthOldestShadow( i ); + HeapRegionNode hrnIthShadow = id2hrn.get( idIthShadow ); + + transferOnto( hrnIthShadow, hrnIth ); + + // clear off shadow nodes after transfer + clearRefEdgesFrom( hrnIthShadow, null, null, true ); + clearRefEdgesTo ( hrnIthShadow, null, null, true ); + hrnIthShadow.setAlpha( new ReachSet().makeCanonical() ); + } + + // finally, globally change shadow tokens into normal tokens + Iterator itrAllVariableNodes = td2vn.entrySet().iterator(); + while( itrAllVariableNodes.hasNext() ) { + Map.Entry me = (Map.Entry) itrAllVariableNodes.next(); + VariableNode ln = (VariableNode) me.getValue(); + + Iterator itrEdges = ln.iteratorToReferencees(); + while( itrEdges.hasNext() ) { + unshadowTokens( as, itrEdges.next() ); + } + } + + Iterator itrAllHRNodes = id2hrn.entrySet().iterator(); + while( 
itrAllHRNodes.hasNext() ) { + Map.Entry me = (Map.Entry) itrAllHRNodes.next(); + HeapRegionNode hrnToAge = (HeapRegionNode) me.getValue(); + + unshadowTokens( as, hrnToAge ); + + Iterator itrEdges = hrnToAge.iteratorToReferencees(); + while( itrEdges.hasNext() ) { + unshadowTokens( as, itrEdges.next() ); + } + } + } + + + + // improve reachability as much as possible + if( !DISABLE_GLOBAL_SWEEP ) { + globalSweep(); + } + + + if( debugCallMap && + mc.getDescriptor().getSymbol().equals( debugCaller ) && + fm.getMethod().getSymbol().equals( debugCallee ) + ) { + + try { + writeGraph( "debug9endResolveCall", + true, // write labels (variables) + true, // selectively hide intermediate temp vars + true, // prune unreachable heap regions + false, // show back edges to confirm graph validity + false, // show parameter indices (unmaintained!) + true, // hide subset reachability states + true); // hide edge taints + } catch( IOException e ) {} + System.out.println( " "+mc+" done calling "+fm ); + ++x; + if( x == debugCallMapCount ) { + System.exit( 0 ); + } + } + } + + static int x = 0; + + + protected boolean hasMatchingField(HeapRegionNode src, RefEdge edge) { + + // if no type, then it's a match-everything region + TypeDescriptor tdSrc = src.getType(); + if( tdSrc == null ) { + return true; + } + + if( tdSrc.isArray() ) { + TypeDescriptor td = edge.getType(); + assert td != null; + + TypeDescriptor tdSrcDeref = tdSrc.dereference(); + assert tdSrcDeref != null; + + if( !typeUtil.isSuperorType( tdSrcDeref, td ) ) { + return false; + } + + return edge.getField().equals( DisjointAnalysis.arrayElementFieldName ); + } + + // if it's not a class, it doesn't have any fields to match + if( !tdSrc.isClass() ) { + return false; + } + + ClassDescriptor cd = tdSrc.getClassDesc(); + while( cd != null ) { + Iterator fieldItr = cd.getFields(); + + while( fieldItr.hasNext() ) { + FieldDescriptor fd = (FieldDescriptor) fieldItr.next(); + + if( fd.getType().equals( edge.getType() ) && + 
fd.getSymbol().equals( edge.getField() ) ) { + return true; + } + } + + cd = cd.getSuperDesc(); + } + + // otherwise it is a class with fields + // but we didn't find a match + return false; + } + + + protected boolean hasMatchingType(RefEdge edge, HeapRegionNode dst) { + + // if the region has no type, matches everything + TypeDescriptor tdDst = dst.getType(); + if( tdDst == null ) { + return true; + } + + // if the type is not a class or an array, don't + // match because primitives are copied, no aliases + ClassDescriptor cdDst = tdDst.getClassDesc(); + if( cdDst == null && !tdDst.isArray() ) { + return false; + } + + // if the edge type is null, it matches everything + TypeDescriptor tdEdge = edge.getType(); + if( tdEdge == null ) { + return true; + } + + return typeUtil.isSuperorType(tdEdge, tdDst); + } + + + protected void unshadowTokens(AllocSite as, RefEdge edge) { + edge.setBeta(edge.getBeta().unshadowTokens(as) ); + } + + protected void unshadowTokens(AllocSite as, HeapRegionNode hrn) { + hrn.setAlpha(hrn.getAlpha().unshadowTokens(as) ); + } + + + private ReachSet toShadowTokens(ReachGraph ogCallee, + ReachSet rsIn) { + + ReachSet rsOut = new ReachSet(rsIn).makeCanonical(); + + Iterator allocItr = ogCallee.allocSites.iterator(); + while( allocItr.hasNext() ) { + AllocSite as = allocItr.next(); + + rsOut = rsOut.toShadowTokens(as); + } + + return rsOut.makeCanonical(); + } + + + private void rewriteCallerReachability(Integer paramIndex, + HeapRegionNode hrn, + RefEdge edge, + ReachSet rules, + Hashtable tokens2states, + Hashtable paramIndex2rewrite_d_p, + Hashtable paramIndex2rewrite_d_s, + Hashtable paramIndex2rewriteD, + ReachGraph ogCallee, + boolean makeChangeSet, + Hashtable edgePlannedChanges) { + + assert(hrn == null && edge != null) || + (hrn != null && edge == null); + + assert rules != null; + assert tokens2states != null; + + ReachSet callerReachabilityNew = new ReachSet().makeCanonical(); + + // for initializing structures in this method + 
ReachState ttsEmpty = new ReachTupleSet().makeCanonical(); + + // use this to construct a change set if required; the idea is to + // map every partially rewritten token tuple set to the set of + // caller-context token tuple sets that were used to generate it + Hashtable > rewritten2source = + new Hashtable >(); + rewritten2source.put( ttsEmpty, new HashSet() ); + + + Iterator rulesItr = rules.iterator(); + while(rulesItr.hasNext()) { + ReachState rule = rulesItr.next(); + + ReachSet rewrittenRule = new ReachSet(ttsEmpty).makeCanonical(); + + Iterator ruleItr = rule.iterator(); + while(ruleItr.hasNext()) { + ReachTuple ttCallee = ruleItr.next(); + + // compute the possibilities for rewriting this callee token + ReachSet ttCalleeRewrites = null; + boolean callerSourceUsed = false; + + if( tokens2states.containsKey( ttCallee ) ) { + callerSourceUsed = true; + ttCalleeRewrites = tokens2states.get( ttCallee ); + assert ttCalleeRewrites != null; + + } else if( ogCallee.paramTokenPrimary2paramIndex.containsKey( ttCallee ) ) { + // use little d_p + Integer paramIndex_j = ogCallee.paramTokenPrimary2paramIndex.get( ttCallee ); + assert paramIndex_j != null; + ttCalleeRewrites = paramIndex2rewrite_d_p.get( paramIndex_j ); + assert ttCalleeRewrites != null; + + } else if( ogCallee.paramTokenSecondary2paramIndex.containsKey( ttCallee ) ) { + // use little d_s + Integer paramIndex_j = ogCallee.paramTokenSecondary2paramIndex.get( ttCallee ); + assert paramIndex_j != null; + ttCalleeRewrites = paramIndex2rewrite_d_s.get( paramIndex_j ); + assert ttCalleeRewrites != null; + + } else if( ogCallee.paramTokenSecondaryPlus2paramIndex.containsKey( ttCallee ) ) { + // worse, use big D + Integer paramIndex_j = ogCallee.paramTokenSecondaryPlus2paramIndex.get( ttCallee ); + assert paramIndex_j != null; + ttCalleeRewrites = paramIndex2rewriteD.get( paramIndex_j ); + assert ttCalleeRewrites != null; + + } else if( ogCallee.paramTokenSecondaryStar2paramIndex.containsKey( ttCallee ) ) { + // 
worse, use big D + Integer paramIndex_j = ogCallee.paramTokenSecondaryStar2paramIndex.get( ttCallee ); + assert paramIndex_j != null; + ttCalleeRewrites = paramIndex2rewriteD.get( paramIndex_j ); + assert ttCalleeRewrites != null; + + } else { + // otherwise there's no need for a rewrite, just pass this one on + ReachState ttsCaller = new ReachTupleSet( ttCallee ).makeCanonical(); + ttCalleeRewrites = new ReachSet( ttsCaller ).makeCanonical(); + } + + // branch every version of the working rewritten rule with + // the possibilities for rewriting the current callee token + ReachSet rewrittenRuleWithTTCallee = new ReachSet().makeCanonical(); + + Iterator rewrittenRuleItr = rewrittenRule.iterator(); + while( rewrittenRuleItr.hasNext() ) { + ReachState ttsRewritten = rewrittenRuleItr.next(); + + Iterator ttCalleeRewritesItr = ttCalleeRewrites.iterator(); + while( ttCalleeRewritesItr.hasNext() ) { + ReachState ttsBranch = ttCalleeRewritesItr.next(); + + ReachState ttsRewrittenNext = ttsRewritten.unionUpArity( ttsBranch ); + + if( makeChangeSet ) { + // in order to keep the list of source token tuple sets + // start with the sets used to make the partially rewritten + // rule up to this point + HashSet sourceSets = rewritten2source.get( ttsRewritten ); + assert sourceSets != null; + + // make a shallow copy for possible modification + sourceSets = (HashSet) sourceSets.clone(); + + // if we used something from the caller to rewrite it, remember + if( callerSourceUsed ) { + sourceSets.add( ttsBranch ); + } + + // set mapping for the further rewritten rule + rewritten2source.put( ttsRewrittenNext, sourceSets ); + } + + rewrittenRuleWithTTCallee = + rewrittenRuleWithTTCallee.union( ttsRewrittenNext ); + } + } + + // now the rewritten rule's possibilities have been extended by + // rewriting the current callee token, remember result + rewrittenRule = rewrittenRuleWithTTCallee; + } + + // the rule has been entirely rewritten into the caller context + // now, so add it to the 
new reachability information + callerReachabilityNew = + callerReachabilityNew.union( rewrittenRule ); + } + + if( makeChangeSet ) { + ChangeSet callerChangeSet = new ChangeSet().makeCanonical(); + + // each possibility for the final reachability should have a set of + // caller sources mapped to it, use to create the change set + Iterator callerReachabilityItr = callerReachabilityNew.iterator(); + while( callerReachabilityItr.hasNext() ) { + ReachState ttsRewrittenFinal = callerReachabilityItr.next(); + HashSet sourceSets = rewritten2source.get( ttsRewrittenFinal ); + assert sourceSets != null; + + Iterator sourceSetsItr = sourceSets.iterator(); + while( sourceSetsItr.hasNext() ) { + ReachState ttsSource = sourceSetsItr.next(); + + callerChangeSet = + callerChangeSet.union( new ChangeTuple( ttsSource, ttsRewrittenFinal ) ); + } + } + + assert edgePlannedChanges != null; + edgePlannedChanges.put( edge, callerChangeSet ); + } + + if( hrn == null ) { + edge.setBetaNew( edge.getBetaNew().union( callerReachabilityNew ) ); + } else { + hrn.setAlphaNew( hrn.getAlphaNew().union( callerReachabilityNew ) ); + } + } + + + + private HashSet + getHRNSetThatPossiblyMapToCalleeHRN( ReachGraph ogCallee, + HeapRegionNode hrnCallee, + Hashtable > pi2dr, + Hashtable > pi2r + ) { + + HashSet possibleCallerHRNs = new HashSet(); + + Set paramIndicesCallee_p = ogCallee.idPrimary2paramIndexSet .get( hrnCallee.getID() ); + Set paramIndicesCallee_s = ogCallee.idSecondary2paramIndexSet.get( hrnCallee.getID() ); + + if( paramIndicesCallee_p == null && + paramIndicesCallee_s == null ) { + // this is a node allocated in the callee and it has + // exactly one shadow node in the caller to map to + AllocSite as = hrnCallee.getAllocSite(); + assert as != null; + + int age = as.getAgeCategory( hrnCallee.getID() ); + assert age != AllocSite.AGE_notInThisSite; + + Integer idCaller; + if( age == AllocSite.AGE_summary ) { + idCaller = as.getSummaryShadow(); + + } else if( age == AllocSite.AGE_oldest ) 
{ + idCaller = as.getOldestShadow(); + + } else { + assert age == AllocSite.AGE_in_I; + + Integer I = as.getAge( hrnCallee.getID() ); + assert I != null; + + idCaller = as.getIthOldestShadow( I ); + } + + assert id2hrn.containsKey( idCaller ); + possibleCallerHRNs.add( id2hrn.get( idCaller ) ); + + return possibleCallerHRNs; + } + + // find out what primary objects this might be + if( paramIndicesCallee_p != null ) { + // this is a node that was created to represent a parameter + // so it maps to some regions directly reachable from the arg labels + Iterator itrIndex = paramIndicesCallee_p.iterator(); + while( itrIndex.hasNext() ) { + Integer paramIndexCallee = itrIndex.next(); + assert pi2dr.containsKey( paramIndexCallee ); + possibleCallerHRNs.addAll( pi2dr.get( paramIndexCallee ) ); + } + } + + // find out what secondary objects this might be + if( paramIndicesCallee_s != null ) { + // this is a node that was created to represent objs reachable from + // some parameter, so it maps to regions reachable from the arg labels + Iterator itrIndex = paramIndicesCallee_s.iterator(); + while( itrIndex.hasNext() ) { + Integer paramIndexCallee = itrIndex.next(); + assert pi2r.containsKey( paramIndexCallee ); + possibleCallerHRNs.addAll( pi2r.get( paramIndexCallee ) ); + } + } + + // TODO: is this true? + // one of the two cases above should have put something in here + //assert !possibleCallerHRNs.isEmpty(); + + return possibleCallerHRNs; + } + + + + //////////////////////////////////////////////////// + // + // This global sweep is an optional step to prune + // reachability sets that are not internally + // consistent with the global graph. It should be + // invoked after strong updates or method calls. 
+ // + //////////////////////////////////////////////////// + public void globalSweep() { + + // boldB is part of the phase 1 sweep + Hashtable< Integer, Hashtable > boldB = + new Hashtable< Integer, Hashtable >(); + + // visit every heap region to initialize alphaNew and calculate boldB + Set hrns = id2hrn.entrySet(); + Iterator itrHrns = hrns.iterator(); + while( itrHrns.hasNext() ) { + Map.Entry me = (Map.Entry)itrHrns.next(); + Integer token = (Integer) me.getKey(); + HeapRegionNode hrn = (HeapRegionNode) me.getValue(); + + // assert that this node and incoming edges have clean alphaNew + // and betaNew sets, respectively + assert rsEmpty.equals( hrn.getAlphaNew() ); + + Iterator itrRers = hrn.iteratorToReferencers(); + while( itrRers.hasNext() ) { + RefEdge edge = itrRers.next(); + assert rsEmpty.equals( edge.getBetaNew() ); + } + + // calculate boldB for this flagged node + if( hrn.isFlagged() || hrn.isParameter() ) { + + Hashtable boldB_f = + new Hashtable(); + + Set workSetEdges = new HashSet(); + + // initial boldB_f constraints + Iterator itrRees = hrn.iteratorToReferencees(); + while( itrRees.hasNext() ) { + RefEdge edge = itrRees.next(); + + assert !boldB.containsKey( edge ); + boldB_f.put( edge, edge.getBeta() ); + + assert !workSetEdges.contains( edge ); + workSetEdges.add( edge ); + } + + // enforce the boldB_f constraint at edges until we reach a fixed point + while( !workSetEdges.isEmpty() ) { + RefEdge edge = workSetEdges.iterator().next(); + workSetEdges.remove( edge ); + + Iterator itrPrime = edge.getDst().iteratorToReferencees(); + while( itrPrime.hasNext() ) { + RefEdge edgePrime = itrPrime.next(); + + ReachSet prevResult = boldB_f.get( edgePrime ); + ReachSet intersection = boldB_f.get( edge ).intersection( edgePrime.getBeta() ); + + if( prevResult == null || + prevResult.union( intersection ).size() > prevResult.size() ) { + + if( prevResult == null ) { + boldB_f.put( edgePrime, edgePrime.getBeta().union( intersection ) ); + } else { + 
boldB_f.put( edgePrime, prevResult .union( intersection ) ); + } + workSetEdges.add( edgePrime ); + } + } + } + + boldB.put( token, boldB_f ); + } + } + + + // use boldB to prune tokens from alpha states that are impossible + // and propagate the differences backwards across edges + HashSet edgesForPropagation = new HashSet(); + + Hashtable edgePlannedChanges = + new Hashtable(); + + hrns = id2hrn.entrySet(); + itrHrns = hrns.iterator(); + while( itrHrns.hasNext() ) { + Map.Entry me = (Map.Entry)itrHrns.next(); + Integer token = (Integer) me.getKey(); + HeapRegionNode hrn = (HeapRegionNode) me.getValue(); + + // never remove the identity token from a flagged region + // because it is trivially satisfied + ReachTuple ttException = new ReachTuple( token, + !hrn.isSingleObject(), + ReachTuple.ARITY_ONE ).makeCanonical(); + + ChangeSet cts = new ChangeSet().makeCanonical(); + + // mark tokens for removal + Iterator stateItr = hrn.getAlpha().iterator(); + while( stateItr.hasNext() ) { + ReachState ttsOld = stateItr.next(); + + ReachState markedTokens = new ReachTupleSet().makeCanonical(); + + Iterator ttItr = ttsOld.iterator(); + while( ttItr.hasNext() ) { + ReachTuple ttOld = ttItr.next(); + + // never remove the identity token from a flagged region + // because it is trivially satisfied + if( hrn.isFlagged() || hrn.isParameter() ) { + if( ttOld == ttException ) { + continue; + } + } + + // does boldB_ttOld allow this token? + boolean foundState = false; + Iterator incidentEdgeItr = hrn.iteratorToReferencers(); + while( incidentEdgeItr.hasNext() ) { + RefEdge incidentEdge = incidentEdgeItr.next(); + + // if it isn't allowed, mark for removal + Integer idOld = ttOld.getToken(); + assert id2hrn.containsKey( idOld ); + Hashtable B = boldB.get( idOld ); + ReachSet boldB_ttOld_incident = B.get( incidentEdge );// B is NULL! 
+ if( boldB_ttOld_incident != null && + boldB_ttOld_incident.contains( ttsOld ) ) { + foundState = true; + } + } + + if( !foundState ) { + markedTokens = markedTokens.add( ttOld ); + } + } + + // if there is nothing marked, just move on + if( markedTokens.isEmpty() ) { + hrn.setAlphaNew( hrn.getAlphaNew().union( ttsOld ) ); + continue; + } + + // remove all marked tokens and establish a change set that should + // propagate backwards over edges from this node + ReachState ttsPruned = new ReachTupleSet().makeCanonical(); + ttItr = ttsOld.iterator(); + while( ttItr.hasNext() ) { + ReachTuple ttOld = ttItr.next(); + + if( !markedTokens.containsTuple( ttOld ) ) { + ttsPruned = ttsPruned.union( ttOld ); + } + } + assert !ttsOld.equals( ttsPruned ); + + hrn.setAlphaNew( hrn.getAlphaNew().union( ttsPruned ) ); + ChangeTuple ct = new ChangeTuple( ttsOld, ttsPruned ).makeCanonical(); + cts = cts.union( ct ); + } + + // throw change tuple set on all incident edges + if( !cts.isEmpty() ) { + Iterator incidentEdgeItr = hrn.iteratorToReferencers(); + while( incidentEdgeItr.hasNext() ) { + RefEdge incidentEdge = incidentEdgeItr.next(); + + edgesForPropagation.add( incidentEdge ); + + if( edgePlannedChanges.get( incidentEdge ) == null ) { + edgePlannedChanges.put( incidentEdge, cts ); + } else { + edgePlannedChanges.put( + incidentEdge, + edgePlannedChanges.get( incidentEdge ).union( cts ) + ); + } + } + } + } + + HashSet edgesUpdated = new HashSet(); + + propagateTokensOverEdges( edgesForPropagation, + edgePlannedChanges, + edgesUpdated ); + + // at the end of the 1st phase reference edges have + // beta, betaNew that correspond to beta and betaR + // + // commit beta<-betaNew, so beta=betaR and betaNew + // will represent the beta' calculation in 2nd phase + // + // commit alpha<-alphaNew because it won't change + HashSet res = new HashSet(); + + Iterator nodeItr = id2hrn.values().iterator(); + while( nodeItr.hasNext() ) { + HeapRegionNode hrn = nodeItr.next(); + 
hrn.applyAlphaNew(); + Iterator itrRes = hrn.iteratorToReferencers(); + while( itrRes.hasNext() ) { + res.add( itrRes.next() ); + } + } + + + // 2nd phase + Iterator edgeItr = res.iterator(); + while( edgeItr.hasNext() ) { + RefEdge edge = edgeItr.next(); + HeapRegionNode hrn = edge.getDst(); + + // commit results of last phase + if( edgesUpdated.contains( edge ) ) { + edge.applyBetaNew(); + } + + // compute intial condition of 2nd phase + edge.setBetaNew( edge.getBeta().intersection( hrn.getAlpha() ) ); + } + + // every edge in the graph is the initial workset + Set edgeWorkSet = (Set) res.clone(); + while( !edgeWorkSet.isEmpty() ) { + RefEdge edgePrime = edgeWorkSet.iterator().next(); + edgeWorkSet.remove( edgePrime ); + + RefSrcNode on = edgePrime.getSrc(); + if( !(on instanceof HeapRegionNode) ) { + continue; + } + HeapRegionNode hrn = (HeapRegionNode) on; + + Iterator itrEdge = hrn.iteratorToReferencers(); + while( itrEdge.hasNext() ) { + RefEdge edge = itrEdge.next(); + + ReachSet prevResult = edge.getBetaNew(); + assert prevResult != null; + + ReachSet intersection = edge.getBeta().intersection( edgePrime.getBetaNew() ); + + if( prevResult.union( intersection ).size() > prevResult.size() ) { + edge.setBetaNew( prevResult.union( intersection ) ); + edgeWorkSet.add( edge ); + } + } + } + + // commit beta' (beta<-betaNew) + edgeItr = res.iterator(); + while( edgeItr.hasNext() ) { + edgeItr.next().applyBetaNew(); + } + } + + + + //////////////////////////////////////////////////// + // in merge() and equals() methods the suffix A + // represents the passed in graph and the suffix + // B refers to the graph in this object + // Merging means to take the incoming graph A and + // merge it into B, so after the operation graph B + // is the final result. 
+ //////////////////////////////////////////////////// + public void merge(ReachGraph og) { + + if( og == null ) { + return; + } + + mergeRefSrcNodes(og); + mergeRefEdges(og); + mergeParamIndexMappings(og); + mergeAllocSites(og); + mergeAccessPaths(og); + mergeTempAndLabelCategories(og); + } + + + protected void mergeRefSrcNodes(ReachGraph og) { + Set sA = og.id2hrn.entrySet(); + Iterator iA = sA.iterator(); + while( iA.hasNext() ) { + Map.Entry meA = (Map.Entry)iA.next(); + Integer idA = (Integer) meA.getKey(); + HeapRegionNode hrnA = (HeapRegionNode) meA.getValue(); + + // if this graph doesn't have a node the + // incoming graph has, allocate it + if( !id2hrn.containsKey(idA) ) { + HeapRegionNode hrnB = hrnA.copy(); + id2hrn.put(idA, hrnB); + + } else { + // otherwise this is a node present in both graphs + // so make the new reachability set a union of the + // nodes' reachability sets + HeapRegionNode hrnB = id2hrn.get(idA); + hrnB.setAlpha(hrnB.getAlpha().union(hrnA.getAlpha() ) ); + } + } + + // now add any label nodes that are in graph B but + // not in A + sA = og.td2vn.entrySet(); + iA = sA.iterator(); + while( iA.hasNext() ) { + Map.Entry meA = (Map.Entry)iA.next(); + TempDescriptor tdA = (TempDescriptor) meA.getKey(); + VariableNode lnA = (VariableNode) meA.getValue(); + + // if the label doesn't exist in B, allocate and add it + VariableNode lnB = getVariableNodeFromTemp(tdA); + } + } + + protected void mergeRefEdges(ReachGraph og) { + + // heap regions + Set sA = og.id2hrn.entrySet(); + Iterator iA = sA.iterator(); + while( iA.hasNext() ) { + Map.Entry meA = (Map.Entry)iA.next(); + Integer idA = (Integer) meA.getKey(); + HeapRegionNode hrnA = (HeapRegionNode) meA.getValue(); + + Iterator heapRegionsItrA = hrnA.iteratorToReferencees(); + while( heapRegionsItrA.hasNext() ) { + RefEdge edgeA = heapRegionsItrA.next(); + HeapRegionNode hrnChildA = edgeA.getDst(); + Integer idChildA = hrnChildA.getID(); + + // at this point we know an edge in graph A exists 
+ // idA -> idChildA, does this exist in B? + assert id2hrn.containsKey(idA); + HeapRegionNode hrnB = id2hrn.get(idA); + RefEdge edgeToMerge = null; + + Iterator heapRegionsItrB = hrnB.iteratorToReferencees(); + while( heapRegionsItrB.hasNext() && + edgeToMerge == null ) { + + RefEdge edgeB = heapRegionsItrB.next(); + HeapRegionNode hrnChildB = edgeB.getDst(); + Integer idChildB = hrnChildB.getID(); + + // don't use the RefEdge.equals() here because + // we're talking about existence between graphs + if( idChildB.equals( idChildA ) && + edgeB.typeAndFieldEquals( edgeA ) ) { + + edgeToMerge = edgeB; + } + } + + // if the edge from A was not found in B, + // add it to B. + if( edgeToMerge == null ) { + assert id2hrn.containsKey(idChildA); + HeapRegionNode hrnChildB = id2hrn.get(idChildA); + edgeToMerge = edgeA.copy(); + edgeToMerge.setSrc(hrnB); + edgeToMerge.setDst(hrnChildB); + addRefEdge(hrnB, hrnChildB, edgeToMerge); + } + // otherwise, the edge already existed in both graphs + // so merge their reachability sets + else { + // just replace this beta set with the union + assert edgeToMerge != null; + edgeToMerge.setBeta( + edgeToMerge.getBeta().union(edgeA.getBeta() ) + ); + //TODO eom + edgeToMerge.unionTaintIdentifier(edgeA.getTaintIdentifier()); + if( !edgeA.isInitialParam() ) { + edgeToMerge.setIsInitialParam(false); + } + } + } + } + + // and then again with label nodes + sA = og.td2vn.entrySet(); + iA = sA.iterator(); + while( iA.hasNext() ) { + Map.Entry meA = (Map.Entry)iA.next(); + TempDescriptor tdA = (TempDescriptor) meA.getKey(); + VariableNode lnA = (VariableNode) meA.getValue(); + + Iterator heapRegionsItrA = lnA.iteratorToReferencees(); + while( heapRegionsItrA.hasNext() ) { + RefEdge edgeA = heapRegionsItrA.next(); + HeapRegionNode hrnChildA = edgeA.getDst(); + Integer idChildA = hrnChildA.getID(); + + // at this point we know an edge in graph A exists + // tdA -> idChildA, does this exist in B? 
+ assert td2vn.containsKey(tdA); + VariableNode lnB = td2vn.get(tdA); + RefEdge edgeToMerge = null; + + Iterator heapRegionsItrB = lnB.iteratorToReferencees(); + while( heapRegionsItrB.hasNext() && + edgeToMerge == null ) { + + RefEdge edgeB = heapRegionsItrB.next(); + HeapRegionNode hrnChildB = edgeB.getDst(); + Integer idChildB = hrnChildB.getID(); + + // don't use the RefEdge.equals() here because + // we're talking about existence between graphs + if( idChildB.equals( idChildA ) && + edgeB.typeAndFieldEquals( edgeA ) ) { + + edgeToMerge = edgeB; + } + } + + // if the edge from A was not found in B, + // add it to B. + if( edgeToMerge == null ) { + assert id2hrn.containsKey(idChildA); + HeapRegionNode hrnChildB = id2hrn.get(idChildA); + edgeToMerge = edgeA.copy(); + edgeToMerge.setSrc(lnB); + edgeToMerge.setDst(hrnChildB); + addRefEdge(lnB, hrnChildB, edgeToMerge); + } + // otherwise, the edge already existed in both graphs + // so merge their reachability sets + else { + // just replace this beta set with the union + edgeToMerge.setBeta( + edgeToMerge.getBeta().union(edgeA.getBeta() ) + ); + edgeToMerge.unionTaintIdentifier(edgeA.getTaintIdentifier()); + if( !edgeA.isInitialParam() ) { + edgeToMerge.setIsInitialParam(false); + } + } + } + } + } + + // you should only merge ownership graphs that have the + // same number of parameters, or if one or both parameter + // index tables are empty + protected void mergeParamIndexMappings(ReachGraph og) { + + if( idPrimary2paramIndexSet.size() == 0 ) { + + idPrimary2paramIndexSet = og.idPrimary2paramIndexSet; + paramIndex2idPrimary = og.paramIndex2idPrimary; + + idSecondary2paramIndexSet = og.idSecondary2paramIndexSet; + paramIndex2idSecondary = og.paramIndex2idSecondary; + + paramIndex2tdQ = og.paramIndex2tdQ; + paramIndex2tdR = og.paramIndex2tdR; + + paramTokenPrimary2paramIndex = og.paramTokenPrimary2paramIndex; + paramIndex2paramTokenPrimary = og.paramIndex2paramTokenPrimary; + + paramTokenSecondary2paramIndex = 
og.paramTokenSecondary2paramIndex; + paramIndex2paramTokenSecondary = og.paramIndex2paramTokenSecondary; + paramTokenSecondaryPlus2paramIndex = og.paramTokenSecondaryPlus2paramIndex; + paramIndex2paramTokenSecondaryPlus = og.paramIndex2paramTokenSecondaryPlus; + paramTokenSecondaryStar2paramIndex = og.paramTokenSecondaryStar2paramIndex; + paramIndex2paramTokenSecondaryStar = og.paramIndex2paramTokenSecondaryStar; + + return; + } + + if( og.idPrimary2paramIndexSet.size() == 0 ) { + + og.idPrimary2paramIndexSet = idPrimary2paramIndexSet; + og.paramIndex2idPrimary = paramIndex2idPrimary; + + og.idSecondary2paramIndexSet = idSecondary2paramIndexSet; + og.paramIndex2idSecondary = paramIndex2idSecondary; + + og.paramIndex2tdQ = paramIndex2tdQ; + og.paramIndex2tdR = paramIndex2tdR; + + og.paramTokenPrimary2paramIndex = paramTokenPrimary2paramIndex; + og.paramIndex2paramTokenPrimary = paramIndex2paramTokenPrimary; + + og.paramTokenSecondary2paramIndex = paramTokenSecondary2paramIndex; + og.paramIndex2paramTokenSecondary = paramIndex2paramTokenSecondary; + og.paramTokenSecondaryPlus2paramIndex = paramTokenSecondaryPlus2paramIndex; + og.paramIndex2paramTokenSecondaryPlus = paramIndex2paramTokenSecondaryPlus; + og.paramTokenSecondaryStar2paramIndex = paramTokenSecondaryStar2paramIndex; + og.paramIndex2paramTokenSecondaryStar = paramIndex2paramTokenSecondaryStar; + + return; + } + + assert idPrimary2paramIndexSet.size() == og.idPrimary2paramIndexSet.size(); + assert idSecondary2paramIndexSet.size() == og.idSecondary2paramIndexSet.size(); + } + + protected void mergeAllocSites(ReachGraph og) { + allocSites.addAll(og.allocSites); + } + + protected void mergeAccessPaths(ReachGraph og) { + UtilAlgorithms.mergeHashtablesWithHashSetValues(temp2accessPaths, + og.temp2accessPaths); + } + + protected void mergeTempAndLabelCategories(ReachGraph og) { + outOfScopeTemps.addAll(og.outOfScopeTemps); + outOfScopeLabels.addAll(og.outOfScopeLabels); + parameterTemps.addAll(og.parameterTemps); 
+ parameterLabels.addAll(og.parameterLabels); + } + + + + // it is necessary in the equals() member functions + // to "check both ways" when comparing the data + // structures of two graphs. For instance, if all + // edges between heap region nodes in graph A are + // present and equal in graph B it is not sufficient + // to say the graphs are equal. Consider that there + // may be edges in graph B that are not in graph A. + // the only way to know that all edges in both graphs + // are equally present is to iterate over both data + // structures and compare against the other graph. + public boolean equals(ReachGraph og) { + + if( og == null ) { + return false; + } + + if( !areHeapRegionNodesEqual(og) ) { + return false; + } + + if( !areVariableNodesEqual(og) ) { + return false; + } + + if( !areRefEdgesEqual(og) ) { + return false; + } + + if( !areParamIndexMappingsEqual(og) ) { + return false; + } + + if( !areAccessPathsEqual(og) ) { + return false; + } + + // if everything is equal up to this point, + // assert that allocSites is also equal-- + // this data is redundant and kept for efficiency + assert allocSites .equals(og.allocSites ); + assert outOfScopeTemps .equals(og.outOfScopeTemps ); + assert outOfScopeLabels.equals(og.outOfScopeLabels); + assert parameterTemps .equals(og.parameterTemps ); + assert parameterLabels .equals(og.parameterLabels ); + + return true; + } + + protected boolean areHeapRegionNodesEqual(ReachGraph og) { + + if( !areallHRNinAalsoinBandequal(this, og) ) { + return false; + } + + if( !areallHRNinAalsoinBandequal(og, this) ) { + return false; + } + + return true; + } + + static protected boolean areallHRNinAalsoinBandequal(ReachGraph ogA, + ReachGraph ogB) { + Set sA = ogA.id2hrn.entrySet(); + Iterator iA = sA.iterator(); + while( iA.hasNext() ) { + Map.Entry meA = (Map.Entry)iA.next(); + Integer idA = (Integer) meA.getKey(); + HeapRegionNode hrnA = (HeapRegionNode) meA.getValue(); + + if( !ogB.id2hrn.containsKey(idA) ) { + return 
false; + } + + HeapRegionNode hrnB = ogB.id2hrn.get(idA); + if( !hrnA.equalsIncludingAlpha(hrnB) ) { + return false; + } + } + + return true; + } + + + protected boolean areVariableNodesEqual(ReachGraph og) { + + if( !areallLNinAalsoinBandequal(this, og) ) { + return false; + } + + if( !areallLNinAalsoinBandequal(og, this) ) { + return false; + } + + return true; + } + + static protected boolean areallLNinAalsoinBandequal(ReachGraph ogA, + ReachGraph ogB) { + Set sA = ogA.td2vn.entrySet(); + Iterator iA = sA.iterator(); + while( iA.hasNext() ) { + Map.Entry meA = (Map.Entry)iA.next(); + TempDescriptor tdA = (TempDescriptor) meA.getKey(); + + if( !ogB.td2vn.containsKey(tdA) ) { + return false; + } + } + + return true; + } + + + protected boolean areRefEdgesEqual(ReachGraph og) { + if( !areallREinAandBequal(this, og) ) { + return false; + } + + return true; + } + + static protected boolean areallREinAandBequal(ReachGraph ogA, + ReachGraph ogB) { + + // check all the heap region->heap region edges + Set sA = ogA.id2hrn.entrySet(); + Iterator iA = sA.iterator(); + while( iA.hasNext() ) { + Map.Entry meA = (Map.Entry)iA.next(); + Integer idA = (Integer) meA.getKey(); + HeapRegionNode hrnA = (HeapRegionNode) meA.getValue(); + + // we should have already checked that the same + // heap regions exist in both graphs + assert ogB.id2hrn.containsKey(idA); + + if( !areallREfromAequaltoB(ogA, hrnA, ogB) ) { + return false; + } + + // then check every edge in B for presence in A, starting + // from the same parent HeapRegionNode + HeapRegionNode hrnB = ogB.id2hrn.get(idA); + + if( !areallREfromAequaltoB(ogB, hrnB, ogA) ) { + return false; + } + } + + // then check all the label->heap region edges + sA = ogA.td2vn.entrySet(); + iA = sA.iterator(); + while( iA.hasNext() ) { + Map.Entry meA = (Map.Entry)iA.next(); + TempDescriptor tdA = (TempDescriptor) meA.getKey(); + VariableNode lnA = (VariableNode) meA.getValue(); + + // we should have already checked that the same + // label 
nodes exist in both graphs + assert ogB.td2vn.containsKey(tdA); + + if( !areallREfromAequaltoB(ogA, lnA, ogB) ) { + return false; + } + + // then check every edge in B for presence in A, starting + // from the same parent VariableNode + VariableNode lnB = ogB.td2vn.get(tdA); + + if( !areallREfromAequaltoB(ogB, lnB, ogA) ) { + return false; + } + } + + return true; + } + + + static protected boolean areallREfromAequaltoB(ReachGraph ogA, + RefSrcNode onA, + ReachGraph ogB) { + + Iterator itrA = onA.iteratorToReferencees(); + while( itrA.hasNext() ) { + RefEdge edgeA = itrA.next(); + HeapRegionNode hrnChildA = edgeA.getDst(); + Integer idChildA = hrnChildA.getID(); + + assert ogB.id2hrn.containsKey(idChildA); + + // at this point we know an edge in graph A exists + // onA -> idChildA, does this exact edge exist in B? + boolean edgeFound = false; + + RefSrcNode onB = null; + if( onA instanceof HeapRegionNode ) { + HeapRegionNode hrnA = (HeapRegionNode) onA; + onB = ogB.id2hrn.get(hrnA.getID() ); + } else { + VariableNode lnA = (VariableNode) onA; + onB = ogB.td2vn.get(lnA.getTempDescriptor() ); + } + + Iterator itrB = onB.iteratorToReferencees(); + while( itrB.hasNext() ) { + RefEdge edgeB = itrB.next(); + HeapRegionNode hrnChildB = edgeB.getDst(); + Integer idChildB = hrnChildB.getID(); + + if( idChildA.equals( idChildB ) && + edgeA.typeAndFieldEquals( edgeB ) ) { + + // there is an edge in the right place with the right field, + // but do they have the same attributes? 
+ if( edgeA.getBeta().equals(edgeB.getBeta() ) ) { + edgeFound = true; + } + } + } + + if( !edgeFound ) { + return false; + } + } + + return true; + } + + + protected boolean areParamIndexMappingsEqual(ReachGraph og) { + + if( idPrimary2paramIndexSet.size() != og.idPrimary2paramIndexSet.size() ) { + return false; + } + + if( idSecondary2paramIndexSet.size() != og.idSecondary2paramIndexSet.size() ) { + return false; + } + + return true; + } + + + protected boolean areAccessPathsEqual(ReachGraph og) { + return temp2accessPaths.equals( og.temp2accessPaths ); + } + + + + public Set hasPotentialAlias( HeapRegionNode hrn1, HeapRegionNode hrn2 ) { + assert hrn1 != null; + assert hrn2 != null; + + // then get the various tokens for these heap regions + ReachTuple h1 = new ReachTuple(hrn1.getID(), + !hrn1.isSingleObject(), + ReachTuple.ARITY_ONE).makeCanonical(); + + ReachTuple h1plus = new ReachTuple(hrn1.getID(), + !hrn1.isSingleObject(), + ReachTuple.ARITY_ONEORMORE).makeCanonical(); + + ReachTuple h1star = new ReachTuple(hrn1.getID(), + !hrn1.isSingleObject(), + ReachTuple.ARITY_ZEROORMORE).makeCanonical(); + + ReachTuple h2 = new ReachTuple(hrn2.getID(), + !hrn2.isSingleObject(), + ReachTuple.ARITY_ONE).makeCanonical(); + + ReachTuple h2plus = new ReachTuple(hrn2.getID(), + !hrn2.isSingleObject(), + ReachTuple.ARITY_ONEORMORE).makeCanonical(); + + ReachTuple h2star = new ReachTuple(hrn2.getID(), + !hrn2.isSingleObject(), + ReachTuple.ARITY_ZEROORMORE).makeCanonical(); + + // then get the merged beta of all out-going edges from these heap regions + ReachSet beta1 = new ReachSet().makeCanonical(); + Iterator itrEdge = hrn1.iteratorToReferencees(); + while( itrEdge.hasNext() ) { + RefEdge edge = itrEdge.next(); + beta1 = beta1.union( edge.getBeta() ); + } + + ReachSet beta2 = new ReachSet().makeCanonical(); + itrEdge = hrn2.iteratorToReferencees(); + while( itrEdge.hasNext() ) { + RefEdge edge = itrEdge.next(); + beta2 = beta2.union( edge.getBeta() ); + } + + boolean 
aliasDetected = false; + + // only do this one if they are different tokens + if( h1 != h2 && + beta1.containsTupleSetWithBoth(h1, h2) ) { + aliasDetected = true; + } + if( beta1.containsTupleSetWithBoth(h1plus, h2) ) { + aliasDetected = true; + } + if( beta1.containsTupleSetWithBoth(h1star, h2) ) { + aliasDetected = true; + } + if( beta1.containsTupleSetWithBoth(h1, h2plus) ) { + aliasDetected = true; + } + if( beta1.containsTupleSetWithBoth(h1plus, h2plus) ) { + aliasDetected = true; + } + if( beta1.containsTupleSetWithBoth(h1star, h2plus) ) { + aliasDetected = true; + } + if( beta1.containsTupleSetWithBoth(h1, h2star) ) { + aliasDetected = true; + } + if( beta1.containsTupleSetWithBoth(h1plus, h2star) ) { + aliasDetected = true; + } + if( beta1.containsTupleSetWithBoth(h1star, h2star) ) { + aliasDetected = true; + } + + if( h1 != h2 && + beta2.containsTupleSetWithBoth(h1, h2) ) { + aliasDetected = true; + } + if( beta2.containsTupleSetWithBoth(h1plus, h2) ) { + aliasDetected = true; + } + if( beta2.containsTupleSetWithBoth(h1star, h2) ) { + aliasDetected = true; + } + if( beta2.containsTupleSetWithBoth(h1, h2plus) ) { + aliasDetected = true; + } + if( beta2.containsTupleSetWithBoth(h1plus, h2plus) ) { + aliasDetected = true; + } + if( beta2.containsTupleSetWithBoth(h1star, h2plus) ) { + aliasDetected = true; + } + if( beta2.containsTupleSetWithBoth(h1, h2star) ) { + aliasDetected = true; + } + if( beta2.containsTupleSetWithBoth(h1plus, h2star) ) { + aliasDetected = true; + } + if( beta2.containsTupleSetWithBoth(h1star, h2star) ) { + aliasDetected = true; + } + + Set common = new HashSet(); + if( aliasDetected ) { + common = findCommonReachableNodes( hrn1, hrn2 ); + if( !(DISABLE_STRONG_UPDATES || DISABLE_GLOBAL_SWEEP) ) { + assert !common.isEmpty(); + } + } + + return common; + } + + + public Set hasPotentialAlias(Integer paramIndex1, Integer paramIndex2) { + + // get parameter 1's heap regions + assert paramIndex2idPrimary.containsKey(paramIndex1); + Integer 
idParamPri1 = paramIndex2idPrimary.get(paramIndex1); + + assert id2hrn.containsKey(idParamPri1); + HeapRegionNode hrnParamPri1 = id2hrn.get(idParamPri1); + assert hrnParamPri1 != null; + + HeapRegionNode hrnParamSec1 = null; + if( paramIndex2idSecondary.containsKey(paramIndex1) ) { + Integer idParamSec1 = paramIndex2idSecondary.get(paramIndex1); + + assert id2hrn.containsKey(idParamSec1); + hrnParamSec1 = id2hrn.get(idParamSec1); + assert hrnParamSec1 != null; + } + + + // get the other parameter + assert paramIndex2idPrimary.containsKey(paramIndex2); + Integer idParamPri2 = paramIndex2idPrimary.get(paramIndex2); + + assert id2hrn.containsKey(idParamPri2); + HeapRegionNode hrnParamPri2 = id2hrn.get(idParamPri2); + assert hrnParamPri2 != null; + + HeapRegionNode hrnParamSec2 = null; + if( paramIndex2idSecondary.containsKey(paramIndex2) ) { + Integer idParamSec2 = paramIndex2idSecondary.get(paramIndex2); + + assert id2hrn.containsKey(idParamSec2); + hrnParamSec2 = id2hrn.get(idParamSec2); + assert hrnParamSec2 != null; + } + + Set common = new HashSet(); + common.addAll( hasPotentialAlias( hrnParamPri1, hrnParamPri2 ) ); + + if( hrnParamSec1 != null ) { + common.addAll( hasPotentialAlias( hrnParamSec1, hrnParamPri2 ) ); + } + + if( hrnParamSec2 != null ) { + common.addAll( hasPotentialAlias( hrnParamSec2, hrnParamPri1 ) ); + } + + if( hrnParamSec1 != null && hrnParamSec2 != null ) { + common.addAll( hasPotentialAlias( hrnParamSec1, hrnParamSec2 ) ); + } + + return common; + } + + + public Set hasPotentialAlias(Integer paramIndex, AllocSite as) { + + // get parameter's heap regions + assert paramIndex2idPrimary.containsKey(paramIndex); + Integer idParamPri = paramIndex2idPrimary.get(paramIndex); + + assert id2hrn.containsKey(idParamPri); + HeapRegionNode hrnParamPri = id2hrn.get(idParamPri); + assert hrnParamPri != null; + + HeapRegionNode hrnParamSec = null; + if( paramIndex2idSecondary.containsKey(paramIndex) ) { + Integer idParamSec = 
paramIndex2idSecondary.get(paramIndex); + + assert id2hrn.containsKey(idParamSec); + hrnParamSec = id2hrn.get(idParamSec); + assert hrnParamSec != null; + } + + // get summary node + assert id2hrn.containsKey( as.getSummary() ); + HeapRegionNode hrnSummary = id2hrn.get( as.getSummary() ); + assert hrnSummary != null; + + Set common = hasPotentialAlias( hrnParamPri, hrnSummary ); + + if( hrnParamSec != null ) { + common.addAll( hasPotentialAlias( hrnParamSec, hrnSummary ) ); + } + + // check for other nodes + for( int i = 0; i < as.getAllocationDepth(); ++i ) { + + assert id2hrn.containsKey( as.getIthOldest( i ) ); + HeapRegionNode hrnIthOldest = id2hrn.get( as.getIthOldest( i ) ); + assert hrnIthOldest != null; + + common = hasPotentialAlias( hrnParamPri, hrnIthOldest ); + + if( hrnParamSec != null ) { + common.addAll( hasPotentialAlias( hrnParamSec, hrnIthOldest ) ); + } + } + + return common; + } + + + public Set hasPotentialAlias(AllocSite as1, AllocSite as2) { + + // get summary node 1's alpha + Integer idSum1 = as1.getSummary(); + assert id2hrn.containsKey(idSum1); + HeapRegionNode hrnSum1 = id2hrn.get(idSum1); + assert hrnSum1 != null; + + // get summary node 2's alpha + Integer idSum2 = as2.getSummary(); + assert id2hrn.containsKey(idSum2); + HeapRegionNode hrnSum2 = id2hrn.get(idSum2); + assert hrnSum2 != null; + + Set common = hasPotentialAlias( hrnSum1, hrnSum2 ); + + // check sum2 against alloc1 nodes + for( int i = 0; i < as1.getAllocationDepth(); ++i ) { + Integer idI1 = as1.getIthOldest(i); + assert id2hrn.containsKey(idI1); + HeapRegionNode hrnI1 = id2hrn.get(idI1); + assert hrnI1 != null; + + common.addAll( hasPotentialAlias( hrnI1, hrnSum2 ) ); + } + + // check sum1 against alloc2 nodes + for( int i = 0; i < as2.getAllocationDepth(); ++i ) { + Integer idI2 = as2.getIthOldest(i); + assert id2hrn.containsKey(idI2); + HeapRegionNode hrnI2 = id2hrn.get(idI2); + assert hrnI2 != null; + + common.addAll( hasPotentialAlias( hrnSum1, hrnI2 ) ); + + // while 
we're at it, do an inner loop for alloc2 vs alloc1 nodes + for( int j = 0; j < as1.getAllocationDepth(); ++j ) { + Integer idI1 = as1.getIthOldest(j); + + // if these are the same site, don't look for the same token, no alias. + // different tokens of the same site could alias together though + if( idI1.equals( idI2 ) ) { + continue; + } + + HeapRegionNode hrnI1 = id2hrn.get(idI1); + + common.addAll( hasPotentialAlias( hrnI1, hrnI2 ) ); + } + } + + return common; + } + + + public Set findCommonReachableNodes( HeapRegionNode hrn1, + HeapRegionNode hrn2 ) { + + Set reachableNodes1 = new HashSet(); + Set reachableNodes2 = new HashSet(); + + Set todoNodes1 = new HashSet(); + todoNodes1.add( hrn1 ); + + Set todoNodes2 = new HashSet(); + todoNodes2.add( hrn2 ); + + // follow links until all reachable nodes have been found + while( !todoNodes1.isEmpty() ) { + HeapRegionNode hrn = todoNodes1.iterator().next(); + todoNodes1.remove( hrn ); + reachableNodes1.add(hrn); + + Iterator edgeItr = hrn.iteratorToReferencees(); + while( edgeItr.hasNext() ) { + RefEdge edge = edgeItr.next(); + + if( !reachableNodes1.contains( edge.getDst() ) ) { + todoNodes1.add( edge.getDst() ); + } + } + } + + while( !todoNodes2.isEmpty() ) { + HeapRegionNode hrn = todoNodes2.iterator().next(); + todoNodes2.remove( hrn ); + reachableNodes2.add(hrn); + + Iterator edgeItr = hrn.iteratorToReferencees(); + while( edgeItr.hasNext() ) { + RefEdge edge = edgeItr.next(); + + if( !reachableNodes2.contains( edge.getDst() ) ) { + todoNodes2.add( edge.getDst() ); + } + } + } + + Set intersection = + new HashSet( reachableNodes1 ); + + intersection.retainAll( reachableNodes2 ); + + return intersection; + } + + + public void writeGraph(String graphName, + boolean writeLabels, + boolean labelSelect, + boolean pruneGarbage, + boolean writeReferencers, + boolean writeParamMappings, + boolean hideSubsetReachability, + boolean hideEdgeTaints + ) throws java.io.IOException { + + // remove all non-word characters from 
the graph name so + // the filename and identifier in dot don't cause errors + graphName = graphName.replaceAll("[\\W]", ""); + + BufferedWriter bw = new BufferedWriter(new FileWriter(graphName+".dot") ); + bw.write("digraph "+graphName+" {\n"); + + HashSet visited = new HashSet(); + + // then visit every heap region node + Set s = id2hrn.entrySet(); + Iterator i = s.iterator(); + while( i.hasNext() ) { + Map.Entry me = (Map.Entry)i.next(); + HeapRegionNode hrn = (HeapRegionNode) me.getValue(); + + if( !pruneGarbage || + (hrn.isFlagged() && hrn.getID() > 0) || + hrn.getDescription().startsWith("param") + ) { + + if( !visited.contains(hrn) ) { + traverseHeapRegionNodes(VISIT_HRN_WRITE_FULL, + hrn, + bw, + null, + visited, + writeReferencers, + hideSubsetReachability, + hideEdgeTaints); + } + } + } + + bw.write(" graphTitle[label=\""+graphName+"\",shape=box];\n"); + } + + // then visit every label node, useful for debugging + if( writeLabels ) { + s = td2vn.entrySet(); + i = s.iterator(); + while( i.hasNext() ) { + Map.Entry me = (Map.Entry)i.next(); + VariableNode ln = (VariableNode) me.getValue(); + + if( labelSelect ) { + String labelStr = ln.getTempDescriptorString(); + if( labelStr.startsWith("___temp") || + labelStr.startsWith("___dst") || + labelStr.startsWith("___srctmp") || + labelStr.startsWith("___neverused") || + labelStr.contains(qString) || + labelStr.contains(rString) || + labelStr.contains(blobString) + ) { + continue; + } + } + + //bw.write(" "+ln.toString() + ";\n"); + + Iterator heapRegionsItr = ln.iteratorToReferencees(); + while( heapRegionsItr.hasNext() ) { + RefEdge edge = heapRegionsItr.next(); + HeapRegionNode hrn = edge.getDst(); + + if( pruneGarbage && !visited.contains(hrn) ) { + traverseHeapRegionNodes(VISIT_HRN_WRITE_FULL, + hrn, + bw, + null, + visited, + writeReferencers, + hideSubsetReachability, + hideEdgeTaints); + } + + bw.write(" " + ln.toString() + + " -> " + hrn.toString() + + "[label=\"" + 
edge.toGraphEdgeString(hideSubsetReachability, + hideEdgeTaints) + + "\",decorate];\n"); + } + } + } + + + bw.write("}\n"); + bw.close(); + } + + protected void traverseHeapRegionNodes(int mode, + HeapRegionNode hrn, + BufferedWriter bw, + TempDescriptor td, + HashSet visited, + boolean writeReferencers, + boolean hideSubsetReachability, + boolean hideEdgeTaints + ) throws java.io.IOException { + + if( visited.contains(hrn) ) { + return; + } + visited.add(hrn); + + switch( mode ) { + case VISIT_HRN_WRITE_FULL: + + String attributes = "["; + + if( hrn.isSingleObject() ) { + attributes += "shape=box"; + } else { + attributes += "shape=Msquare"; + } + + if( hrn.isFlagged() ) { + attributes += ",style=filled,fillcolor=lightgrey"; + } + + attributes += ",label=\"ID" + + hrn.getID() + + "\\n"; + + if( hrn.getType() != null ) { + attributes += hrn.getType().toPrettyString() + "\\n"; + } + + attributes += hrn.getDescription() + + "\\n" + + hrn.getAlphaString(hideSubsetReachability) + + "\"]"; + + bw.write(" " + hrn.toString() + attributes + ";\n"); + break; + } + + + + Iterator childRegionsItr = hrn.iteratorToReferencees(); + while( childRegionsItr.hasNext() ) { + RefEdge edge = childRegionsItr.next(); + HeapRegionNode hrnChild = edge.getDst(); + + switch( mode ) { + case VISIT_HRN_WRITE_FULL: + bw.write(" " + hrn.toString() + + " -> " + hrnChild.toString() + + "[label=\"" + edge.toGraphEdgeString(hideSubsetReachability, + hideEdgeTaints) + + "\",decorate];\n"); + break; + } + + traverseHeapRegionNodes(mode, + hrnChild, + bw, + td, + visited, + writeReferencers, + hideSubsetReachability, + hideEdgeTaints); + } + } + + public int getTaintIdentifierFromHRN(HeapRegionNode hrn){ + HashSet referenceEdges=hrn.referencers; + Iterator iter=referenceEdges.iterator(); + + int taintIdentifier=0; + while(iter.hasNext()){ + RefEdge edge=iter.next(); + taintIdentifier=taintIdentifier | edge.getTaintIdentifier(); + } + + return taintIdentifier; + + } + + public void 
propagateTaintIdentifier(HeapRegionNode hrn, int newTaintIdentifier, HashSet visitedSet){ + + HashSet setEdge=hrn.referencers; + Iterator iter=setEdge.iterator(); + while(iter.hasNext()){ + RefEdge edge= iter.next(); + edge.unionTaintIdentifier(newTaintIdentifier); + if(edge.getSrc() instanceof HeapRegionNode){ + + HeapRegionNode refHRN=(HeapRegionNode)edge.getSrc(); + //check whether it is reflexive edge + if(!refHRN.equals(hrn) && !visitedSet.contains(refHRN)){ + visitedSet.add(refHRN); + propagateTaintIdentifier((HeapRegionNode)edge.getSrc(),newTaintIdentifier,visitedSet); + } + + } + } + + } + + public void depropagateTaintIdentifier(HeapRegionNode hrn, int newTaintIdentifier, HashSet visitedSet){ + + HashSet setEdge=hrn.referencers; + Iterator iter=setEdge.iterator(); + while(iter.hasNext()){ + RefEdge edge= iter.next(); + edge.minusTaintIdentifier(newTaintIdentifier); + if(edge.getSrc() instanceof HeapRegionNode){ + + HeapRegionNode refHRN=(HeapRegionNode)edge.getSrc(); + //check whether it is reflexive edge + if(!refHRN.equals(hrn) && !visitedSet.contains(refHRN)){ + visitedSet.add(refHRN); + depropagateTaintIdentifier((HeapRegionNode)edge.getSrc(),newTaintIdentifier,visitedSet); + } + + } + } + + } + + + // in this analysis specifically: + // we have a notion that a null type is the "match any" type, + // so wrap calls to the utility methods that deal with null + public TypeDescriptor mostSpecificType( TypeDescriptor td1, + TypeDescriptor td2 ) { + if( td1 == null ) { + return td2; + } + if( td2 == null ) { + return td1; + } + if( td1.isNull() ) { + return td2; + } + if( td2.isNull() ) { + return td1; + } + return typeUtil.mostSpecific( td1, td2 ); + } + + public TypeDescriptor mostSpecificType( TypeDescriptor td1, + TypeDescriptor td2, + TypeDescriptor td3 ) { + + return mostSpecificType( td1, + mostSpecificType( td2, td3 ) + ); + } + + public TypeDescriptor mostSpecificType( TypeDescriptor td1, + TypeDescriptor td2, + TypeDescriptor td3, + TypeDescriptor 
td4 ) { + + return mostSpecificType( mostSpecificType( td1, td2 ), + mostSpecificType( td3, td4 ) + ); + } + + // remember, in this analysis a null type means "any type" + public boolean isSuperiorType( TypeDescriptor possibleSuper, + TypeDescriptor possibleChild ) { + if( possibleSuper == null || + possibleChild == null ) { + return true; + } + + if( possibleSuper.isNull() || + possibleChild.isNull() ) { + return true; + } + + return typeUtil.isSuperorType( possibleSuper, possibleChild ); + } + + public String generateUniqueIdentifier(FlatMethod fm, int paramIdx, String type){ + + //type: A->aliapsed parameter heap region + // P -> primary paramter heap region + // S -> secondary paramter heap region + + String identifier; + if(type.equals("A")){ + //aliased param + identifier="FM"+fm.hashCode()+".A"; + }else{ + identifier="FM"+fm.hashCode()+"."+paramIdx+"."+type; + } + return identifier; + + } + + public String generateUniqueIdentifier(AllocSite as, int age, boolean isSummary){ + + String identifier; + + FlatNew fn=as.getFlatNew(); + + if(isSummary){ + identifier="FN"+fn.hashCode()+".S"; + }else{ + identifier="FN"+fn.hashCode()+"."+age; + } + + return identifier; + + } +*/ +} diff --git a/Robust/src/Analysis/Disjoint/ReachSet.java b/Robust/src/Analysis/Disjoint/ReachSet.java new file mode 100644 index 00000000..ae80aafa --- /dev/null +++ b/Robust/src/Analysis/Disjoint/ReachSet.java @@ -0,0 +1,539 @@ +package Analysis.DisjointAnalysis; + +import IR.*; +import IR.Flat.*; +import java.util.*; +import java.io.*; + + +public class ReachSet extends Canonical { + + private HashSet possibleReachabilities; + + public ReachSet() { + possibleReachabilities = new HashSet(); + } + + public ReachSet(ReachState tts) { + this(); + assert tts != null; + possibleReachabilities.add(tts); + } + + public ReachSet(ReachTuple tt) { + // can't assert before calling this(), it will + // do the checking though + this( new ReachState(tt).makeCanonical() ); + } + + public ReachSet(HashSet 
possibleReachabilities) { + assert possibleReachabilities != null; + this.possibleReachabilities = possibleReachabilities; + } + + public ReachSet(ReachSet rs) { + assert rs != null; + // okay to clone, ReachSet should be canonical + possibleReachabilities = (HashSet)rs.possibleReachabilities.clone(); + } + + + public ReachSet makeCanonical() { + return (ReachSet) ReachSet.makeCanonical(this); + } + + public Iterator iterator() { + return possibleReachabilities.iterator(); + } + + + public int size() { + return possibleReachabilities.size(); + } + + public boolean isEmpty() { + return possibleReachabilities.isEmpty(); + } + + public boolean contains(ReachState tts) { + assert tts != null; + return possibleReachabilities.contains(tts); + } + + public boolean containsWithZeroes(ReachState tts) { + assert tts != null; + + if( possibleReachabilities.contains(tts) ) { + return true; + } + + Iterator itr = iterator(); + while( itr.hasNext() ) { + ReachState ttsThis = (ReachTupleSet) itr.next(); + if( ttsThis.containsWithZeroes(tts) ) { + return true; + } + } + + return false; + } + + + public boolean containsSuperSet(ReachState tts) { + return containsSuperSet( tts, false ); + } + + public boolean containsStrictSuperSet(ReachState tts) { + return containsSuperSet( tts, true ); + } + + public boolean containsSuperSet(ReachState tts, boolean strict) { + assert tts != null; + + if( !strict && possibleReachabilities.contains(tts) ) { + return true; + } + + Iterator itr = iterator(); + while( itr.hasNext() ) { + ReachState ttsThis = (ReachTupleSet) itr.next(); + if( strict ) { + if( !tts.equals(ttsThis) && tts.isSubset(ttsThis) ) { + return true; + } + } else { + if( tts.isSubset(ttsThis) ) { + return true; + } + } + } + + return false; + } + + + public boolean containsTuple(ReachTuple tt) { + Iterator itr = iterator(); + while( itr.hasNext() ) { + ReachState tts = (ReachTupleSet) itr.next(); + if( tts.containsTuple(tt) ) { + return true; + } + } + return false; + } + + 
public boolean containsTupleSetWithBoth(ReachTuple tt1, ReachTuple tt2) { + Iterator itr = iterator(); + while( itr.hasNext() ) { + ReachState tts = (ReachTupleSet) itr.next(); + if( tts.containsTuple(tt1) && tts.containsTuple(tt2) ) { + return true; + } + } + return false; + } + + public static ReachSet factory(ReachState tts) { + CanonicalWrapper cw=new CanonicalWrapper(tts); + if (lookuphash.containsKey(cw)) + return (ReachSet)lookuphash.get(cw).b; + ReachSet rs=new ReachSet(tts); + rs=rs.makeCanonical(); + cw.b=rs; + lookuphash.put(cw,cw); + return rs; + } + + public ReachSet union(ReachState ttsIn) { + ReachOperation ro=new ReachOperation(this, ttsIn); + if (unionhash.containsKey(ro)) { + return (ReachSet) unionhash.get(ro).c; + } else { + ReachSet rsOut = new ReachSet(this); + rsOut.possibleReachabilities.add(ttsIn); + ro.c=rsOut=rsOut.makeCanonical(); + unionhash.put(ro,ro); + return rsOut; + } + } + + + public ReachSet union(ReachSet rsIn) { + // assert rsIn != null; + + // assert can.containsKey(this); + // assert can.containsKey(rsIn); + + ReachOperation ro=new ReachOperation(this, rsIn); + if (unionhash.containsKey(ro)) + return (ReachSet) unionhash.get(ro).c; + else { + ReachSet rsOut = new ReachSet(this); + rsOut.possibleReachabilities.addAll(rsIn.possibleReachabilities); + ro.c=rsOut=rsOut.makeCanonical(); + unionhash.put(ro, ro); + return rsOut; + } + } + + public ReachSet intersection(ReachSet rsIn) { + // assert rsIn != null; + + // assert can.containsKey(this); + // assert can.containsKey(rsIn); + + ReachOperation ro=new ReachOperation(this, rsIn); + if (interhash.containsKey(ro)) + return (ReachSet) interhash.get(ro).c; + else { + ReachSet rsOut = new ReachSet(); + Iterator i = this.iterator(); + while( i.hasNext() ) { + ReachState tts = (ReachTupleSet) i.next(); + if( rsIn.possibleReachabilities.contains(tts) ) { + rsOut.possibleReachabilities.add(tts); + } + } + ro.c=rsOut=rsOut.makeCanonical(); + interhash.put(ro,ro); + return rsOut; + } + } + 
+ + public ReachSet add(ReachState tts) { + assert tts != null; + return union(tts); + } + + public ReachSet remove(ReachState tts) { + assert tts != null; + ReachSet rsOut = new ReachSet(this); + assert rsOut.possibleReachabilities.remove(tts); + return rsOut.makeCanonical(); + } + + public ReachSet removeTokenAIfTokenB(ReachTuple ttA, + ReachTuple ttB) { + assert ttA != null; + assert ttB != null; + + ReachSet rsOut = new ReachSet(); + + Iterator i = this.iterator(); + while( i.hasNext() ) { + ReachState tts = (ReachTupleSet) i.next(); + if( tts.containsTuple( ttB ) ) { + rsOut.possibleReachabilities.add( tts.remove(ttA) ); + } else { + rsOut.possibleReachabilities.add( tts ); + } + } + + return rsOut.makeCanonical(); + } + + + public ReachSet applyChangeSet(ChangeSet C, boolean keepSourceState) { + assert C != null; + + ReachSet rsOut = new ReachSet(); + + Iterator i = this.iterator(); + while( i.hasNext() ) { + ReachState tts = (ReachTupleSet) i.next(); + + boolean changeFound = false; + + Iterator itrC = C.iterator(); + while( itrC.hasNext() ) { + ChangeTuple c = itrC.next(); + + if( tts.equals( c.getSetToMatch() ) ) { + rsOut.possibleReachabilities.add( c.getSetToAdd() ); + changeFound = true; + } + } + + if( keepSourceState || !changeFound ) { + rsOut.possibleReachabilities.add( tts ); + } + } + + return rsOut.makeCanonical(); + } + + + public ChangeSet unionUpArityToChangeSet(ReachSet rsIn) { + assert rsIn != null; + + ChangeSet ctsOut = new ChangeSet(); + + Iterator itrO = this.iterator(); + while( itrO.hasNext() ) { + ReachState o = (ReachTupleSet) itrO.next(); + + Iterator itrR = rsIn.iterator(); + while( itrR.hasNext() ) { + ReachState r = (ReachTupleSet) itrR.next(); + + ReachState theUnion = new ReachTupleSet().makeCanonical(); + + Iterator itrRelement = r.iterator(); + while( itrRelement.hasNext() ) { + ReachTuple ttR = (ReachTuple) itrRelement.next(); + ReachTuple ttO = o.containsToken(ttR.getToken() ); + + if( ttO != null ) { + theUnion = 
theUnion.union((new ReachState(ttR.unionArity(ttO)).makeCanonical() ) ); + } else { + theUnion = theUnion.union((new ReachState(ttR)).makeCanonical() ); + } + } + + Iterator itrOelement = o.iterator(); + while( itrOelement.hasNext() ) { + ReachTuple ttO = (ReachTuple) itrOelement.next(); + ReachTuple ttR = theUnion.containsToken(ttO.getToken() ); + + if( ttR == null ) { + theUnion = theUnion.union(new ReachState(ttO).makeCanonical() ); + } + } + + if( !theUnion.isEmpty() ) { + ctsOut = ctsOut.union((new ChangeSet(new ChangeTuple(o, theUnion) )).makeCanonical() ); + } + } + } + + return ctsOut.makeCanonical(); + } + + + public ReachSet ageTokens(AllocSite as) { + assert as != null; + + ReachSet rsOut = new ReachSet(); + + Iterator itrS = this.iterator(); + while( itrS.hasNext() ) { + ReachState tts = (ReachTupleSet) itrS.next(); + rsOut.possibleReachabilities.add(tts.ageTokens(as) ); + } + + return rsOut.makeCanonical(); + } + + + public ReachSet unshadowTokens(AllocSite as) { + assert as != null; + + ReachSet rsOut = new ReachSet(); + + Iterator itrS = this.iterator(); + while( itrS.hasNext() ) { + ReachState tts = (ReachTupleSet) itrS.next(); + rsOut.possibleReachabilities.add(tts.unshadowTokens(as) ); + } + + return rsOut.makeCanonical(); + } + + + public ReachSet toShadowTokens(AllocSite as) { + assert as != null; + + ReachSet rsOut = new ReachSet(); + + Iterator itrS = this.iterator(); + while( itrS.hasNext() ) { + ReachState tts = (ReachTupleSet) itrS.next(); + rsOut.possibleReachabilities.add(tts.toShadowTokens(as) ); + } + + return rsOut.makeCanonical(); + } + + + public ReachSet pruneBy(ReachSet rsIn) { + assert rsIn != null; + + ReachSet rsOut = new ReachSet(); + + Iterator itrB = this.iterator(); + while( itrB.hasNext() ) { + ReachState ttsB = (ReachTupleSet) itrB.next(); + + boolean subsetExists = false; + + Iterator itrA = rsIn.iterator(); + while( itrA.hasNext() && !subsetExists ) { + ReachState ttsA = (ReachTupleSet) itrA.next(); + + if( 
ttsA.isSubset(ttsB) ) { + subsetExists = true; + } + } + + if( subsetExists ) { + rsOut.possibleReachabilities.add(ttsB); + } + } + + return rsOut.makeCanonical(); + } + + + public ReachSet exhaustiveArityCombinations() { + ReachSet rsOut = (new ReachSet()).makeCanonical(); + + int numDimensions = this.possibleReachabilities.size(); + + if( numDimensions > 3 ) { + // for problems that are too big, punt and use less + // precise arity for reachability information + ReachState ttsImprecise = new ReachTupleSet().makeCanonical(); + + Iterator itrThis = this.iterator(); + while( itrThis.hasNext() ) { + ReachState ttsUnit = itrThis.next(); + ttsImprecise = ttsImprecise.unionUpArity(ttsUnit.makeArityZeroOrMore() ); + } + + //rsOut = this.union( ttsImprecise ); + rsOut = rsOut.union(ttsImprecise); + return rsOut; + } + + // add an extra digit to detect termination + int[] digits = new int[numDimensions+1]; + + int minArity = 0; + int maxArity = 2; + + // start with the minimum possible coordinate + for( int i = 0; i < numDimensions+1; ++i ) { + digits[i] = minArity; + } + + // and stop when the highest-ordered axis rolls above the minimum + while( digits[numDimensions] == minArity ) { + + // spit out a "coordinate" made from these digits + ReachState ttsCoordinate = new ReachTupleSet().makeCanonical(); + Iterator ttsItr = this.iterator(); + for( int i = 0; i < numDimensions; ++i ) { + assert ttsItr.hasNext(); + ReachState ttsUnit = ttsItr.next(); + for( int j = 0; j < digits[i]; ++j ) { + ttsCoordinate = ttsCoordinate.unionUpArity(ttsUnit); + } + } + rsOut = rsOut.add(ttsCoordinate.makeCanonical() ); + + // increment + for( int i = 0; i < numDimensions+1; ++i ) { + digits[i]++; + + if( digits[i] > maxArity ) { + // this axis reached its max, so roll it back to min and increment next higher digit + digits[i] = minArity; + + } else { + // this axis did not reach its max so we just enumerated a new unique coordinate, stop + break; + } + } + } + + return rsOut.makeCanonical(); 
+ } + + + public boolean equals(Object o) { + if( o == null ) { + return false; + } + + if( !(o instanceof ReachSet) ) { + return false; + } + + ReachSet rs = (ReachSet) o; + return possibleReachabilities.equals(rs.possibleReachabilities); + } + + + private boolean oldHashSet = false; + private int oldHash = 0; + public int hashCode() { + int currentHash = possibleReachabilities.hashCode(); + + if( oldHashSet == false ) { + oldHash = currentHash; + oldHashSet = true; + } else { + if( oldHash != currentHash ) { + System.out.println("IF YOU SEE THIS A CANONICAL ReachSet CHANGED"); + Integer x = null; + x.toString(); + } + } + + return currentHash; + } + + + public String toStringEscapeNewline( boolean hideSubsetReachability ) { + String s = "["; + + Iterator i = this.iterator(); + while( i.hasNext() ) { + ReachState tts = i.next(); + + // skip this if there is a superset already + if( hideSubsetReachability && + containsStrictSuperSet( tts ) ) { + continue; + } + + s += tts; + if( i.hasNext() ) { + s += "\\n"; + } + } + + s += "]"; + return s; + } + + + public String toString() { + return toString( false ); + } + + public String toString( boolean hideSubsetReachability ) { + String s = "["; + + Iterator i = this.iterator(); + while( i.hasNext() ) { + ReachState tts = i.next(); + + // skip this if there is a superset already + if( hideSubsetReachability && + containsStrictSuperSet( tts ) ) { + continue; + } + + s += tts; + if( i.hasNext() ) { + s += "\n"; + } + } + + s += "]"; + return s; + } +} diff --git a/Robust/src/Analysis/Disjoint/ReachState.java b/Robust/src/Analysis/Disjoint/ReachState.java new file mode 100644 index 00000000..a4353072 --- /dev/null +++ b/Robust/src/Analysis/Disjoint/ReachState.java @@ -0,0 +1,442 @@ +package Analysis.DisjointAnalysis; + +import IR.*; +import IR.Flat.*; +import java.util.*; +import java.io.*; + + +public class ReachState extends Canonical { + + private HashSet tokenTuples; + + + public ReachState() { + tokenTuples = new 
HashSet(); + } + + public ReachState(ReachTuple tt) { + this(); + assert tt != null; + tokenTuples.add(tt); + } + + public ReachState(ReachTupleSet tts) { + assert tts != null; + // okay to clone, ReachTuple and ReachState should be canonical + tokenTuples = (HashSet)tts.tokenTuples.clone(); + } + + + public ReachState makeCanonical() { + return (ReachState) Canonical.makeCanonical(this); + } + + public Iterator iterator() { + return tokenTuples.iterator(); + } + + public boolean isEmpty() { + return tokenTuples.isEmpty(); + } + + public boolean isSubset(ReachState ttsIn) { + assert ttsIn != null; + return ttsIn.tokenTuples.containsAll(this.tokenTuples); + } + + public boolean containsTuple(ReachTuple tt) { + assert tt != null; + return tokenTuples.contains(tt); + } + + public boolean containsBoth(ReachTuple tt1, ReachTuple tt2) { + return containsTuple(tt1) && containsTuple(tt2); + } + + public boolean containsWithZeroes(ReachState tts) { + assert tts != null; + + // first establish that every token tuple from tts is + // also in this set + Iterator ttItrIn = tts.iterator(); + while( ttItrIn.hasNext() ) { + ReachTuple ttIn = ttItrIn.next(); + ReachTuple ttThis = this.containsToken(ttIn.getToken() ); + + if( ttThis == null ) { + return false; + } + } + + // then establish that anything in this set that is + // not in tts is a zero-arity token tuple, which is okay + Iterator ttItrThis = this.iterator(); + while( ttItrThis.hasNext() ) { + ReachTuple ttThis = ttItrThis.next(); + ReachTuple ttIn = tts.containsToken(ttThis.getToken() ); + + if( ttIn == null && + ttThis.getArity() != ReachTuple.ARITY_ZEROORMORE ) { + return false; + } + } + + // if so this set contains tts with zeroes + return true; + } + + public ReachState union(ReachTuple ttIn) { + assert ttIn != null; + ReachOperation ro=new ReachOperation(this, ttIn); + if (unionhash.containsKey(ro)) + return (ReachState) unionhash.get(ro).c; + else { + ReachState ttsOut = new ReachTupleSet(this); + 
ttsOut.tokenTuples.add(ttIn); + ro.c=ttsOut=ttsOut.makeCanonical(); + unionhash.put(ro,ro); + return ttsOut; + } + } + + public ReachState union(ReachTupleSet ttsIn) { + assert ttsIn != null; + ReachOperation ro=new ReachOperation(this, ttsIn); + if (unionhash.containsKey(ro)) { + return (ReachState) unionhash.get(ro).c; + } else { + ReachState ttsOut = new ReachTupleSet(this); + ttsOut.tokenTuples.addAll(ttsIn.tokenTuples); + ro.c=ttsOut=ttsOut.makeCanonical(); + unionhash.put(ro,ro); + return ttsOut; + } + } + + + public ReachState unionUpArity(ReachTupleSet ttsIn) { + assert ttsIn != null; + ReachState ttsOut = new ReachTupleSet(); + + Iterator ttItr = this.iterator(); + while( ttItr.hasNext() ) { + ReachTuple ttThis = ttItr.next(); + ReachTuple ttIn = ttsIn.containsToken(ttThis.getToken() ); + + if( ttIn != null ) { + ttsOut.tokenTuples.add(ttThis.unionArity(ttIn) ); + } else { + ttsOut.tokenTuples.add(ttThis); + } + } + + ttItr = ttsIn.iterator(); + while( ttItr.hasNext() ) { + ReachTuple ttIn = ttItr.next(); + ReachTuple ttThis = ttsOut.containsToken(ttIn.getToken() ); + + if( ttThis == null ) { + ttsOut.tokenTuples.add(ttIn); + } + } + + return ttsOut.makeCanonical(); + } + + + public ReachState add(ReachTuple tt) { + assert tt != null; + return this.union(tt); + } + + + public ReachState remove(ReachTuple tt) { + assert tt != null; + ReachState ttsOut = new ReachTupleSet(this); + ttsOut.tokenTuples.remove(tt); + return ttsOut.makeCanonical(); + } + + + public boolean equals(Object o) { + if( o == null ) { + return false; + } + + if( !(o instanceof ReachState) ) { + return false; + } + + ReachState tts = (ReachTupleSet) o; + return tokenTuples.equals(tts.tokenTuples); + } + + boolean hashcodecomputed=false; + int ourhashcode=0; + + + public int hashCode() { + if (hashcodecomputed) + return ourhashcode; + else { + ourhashcode=tokenTuples.hashCode(); + hashcodecomputed=true; + return ourhashcode; + } + } + + + // this should be a hash table so we can do this 
by key + public ReachTuple containsToken(Integer token) { + assert token != null; + + Iterator itr = tokenTuples.iterator(); + while( itr.hasNext() ) { + ReachTuple tt = (ReachTuple) itr.next(); + if( token.equals(tt.getToken() ) ) { + return tt; + } + } + return null; + } + + + public ReachState ageTokens(AllocSite as) { + assert as != null; + + ReachState ttsOut = new ReachTupleSet(); + + ReachTuple ttSummary = null; + ReachTuple ttOldest = null; + + Iterator itrT = this.iterator(); + while( itrT.hasNext() ) { + ReachTuple tt = (ReachTuple) itrT.next(); + + Integer token = tt.getToken(); + int age = as.getAgeCategory(token); + + // tokens not associated with + // the site should be left alone + if( age == AllocSite.AGE_notInThisSite ) { + ttsOut.tokenTuples.add(tt); + + } else if( age == AllocSite.AGE_summary ) { + // remember the summary tuple, but don't add it + // we may combine it with the oldest tuple + ttSummary = tt; + + } else if( age == AllocSite.AGE_oldest ) { + // found an oldest token, again just remember + // for later + ttOldest = tt; + + } else { + assert age == AllocSite.AGE_in_I; + + Integer I = as.getAge(token); + assert I != null; + + // otherwise, we change this token to the + // next older token + Integer tokenToChangeTo = as.getIthOldest(I + 1); + ReachTuple ttAged = tt.changeTokenTo(tokenToChangeTo); + ttsOut.tokenTuples.add(ttAged); + } + } + + // there are four cases to consider here + // 1. we found a summary tuple and no oldest tuple + // Here we just pass the summary unchanged + // 2. we found an oldest tuple, no summary + // Make a new, arity-one summary tuple + // 3. we found both a summary and an oldest + // Merge them by arity + // 4. 
(not handled) we found neither, do nothing + if ( ttSummary != null && ttOldest == null ) { + ttsOut.tokenTuples.add(ttSummary); + + } else if( ttSummary == null && ttOldest != null ) { + ttsOut.tokenTuples.add(new ReachTuple(as.getSummary(), + true, + ttOldest.getArity() + ).makeCanonical() + ); + + } else if( ttSummary != null && ttOldest != null ) { + ttsOut.tokenTuples.add(ttSummary.unionArity(new ReachTuple(as.getSummary(), + true, + ttOldest.getArity() + ).makeCanonical() + ) + ); + } + + return ttsOut.makeCanonical(); + } + + + public ReachState unshadowTokens(AllocSite as) { + assert as != null; + + ReachState ttsOut = new ReachTupleSet(); + + ReachTuple ttSummary = null; + ReachTuple ttShadowSummary = null; + + Iterator itrT = this.iterator(); + while( itrT.hasNext() ) { + ReachTuple tt = (ReachTuple) itrT.next(); + + Integer token = tt.getToken(); + int shadowAge = as.getShadowAgeCategory(token); + + if( shadowAge == AllocSite.AGE_summary ) { + // remember the summary tuple, but don't add it + // we may combine it with the oldest tuple + ttSummary = tt; + + } else if( shadowAge == AllocSite.SHADOWAGE_notInThisSite ) { + ttsOut.tokenTuples.add(tt); + + } else if( shadowAge == AllocSite.SHADOWAGE_summary ) { + // found the shadow summary token, again just remember + // for later + ttShadowSummary = tt; + + } else if( shadowAge == AllocSite.SHADOWAGE_oldest ) { + Integer tokenToChangeTo = as.getOldest(); + ReachTuple ttNormal = tt.changeTokenTo(tokenToChangeTo); + ttsOut.tokenTuples.add(ttNormal); + + } else { + assert shadowAge == AllocSite.SHADOWAGE_in_I; + + Integer I = as.getShadowAge(token); + assert I != null; + + Integer tokenToChangeTo = as.getIthOldest(-I); + ReachTuple ttNormal = tt.changeTokenTo(tokenToChangeTo); + ttsOut.tokenTuples.add(ttNormal); + } + } + + if ( ttSummary != null && ttShadowSummary == null ) { + ttsOut.tokenTuples.add(ttSummary); + + } else if( ttSummary == null && ttShadowSummary != null ) { + ttsOut.tokenTuples.add(new 
ReachTuple(as.getSummary(), + true, + ttShadowSummary.getArity() + ).makeCanonical() + ); + + } else if( ttSummary != null && ttShadowSummary != null ) { + ttsOut.tokenTuples.add(ttSummary.unionArity(new ReachTuple(as.getSummary(), + true, + ttShadowSummary.getArity() + ).makeCanonical() + ) + ); + } + + return ttsOut.makeCanonical(); + } + + + public ReachState toShadowTokens(AllocSite as) { + assert as != null; + + ReachState ttsOut = new ReachTupleSet().makeCanonical(); + + Iterator itrT = this.iterator(); + while( itrT.hasNext() ) { + ReachTuple tt = (ReachTuple) itrT.next(); + + Integer token = tt.getToken(); + int age = as.getAgeCategory(token); + + // summary tokens and tokens not associated with + // the site should be left alone + if( age == AllocSite.AGE_notInThisSite ) { + ttsOut = ttsOut.union(tt); + + } else if( age == AllocSite.AGE_summary ) { + ttsOut = ttsOut.union(tt.changeTokenTo(as.getSummaryShadow() )); + + } else if( age == AllocSite.AGE_oldest ) { + ttsOut = ttsOut.union(tt.changeTokenTo(as.getOldestShadow() )); + + } else { + assert age == AllocSite.AGE_in_I; + + Integer I = as.getAge(token); + assert I != null; + + ttsOut = ttsOut.union(tt.changeTokenTo(as.getIthOldestShadow(I) )); + } + } + + return ttsOut.makeCanonical(); + } + + + public ReachSet rewriteToken(ReachTuple tokenToRewrite, + ReachSet replacements, + boolean makeChangeSet, + Hashtable > forChangeSet) { + + ReachSet rsOut = new ReachSet().makeCanonical(); + + if( !tokenTuples.contains(tokenToRewrite) ) { + rsOut = rsOut.add(this); + + } else { + ReachState ttsMinusToken = new ReachTupleSet(this); + ttsMinusToken.tokenTuples.remove(tokenToRewrite); + + Iterator replaceItr = replacements.iterator(); + while( replaceItr.hasNext() ) { + ReachState replacement = replaceItr.next(); + ReachState replaced = new ReachTupleSet(ttsMinusToken).makeCanonical(); + replaced = replaced.unionUpArity(replacement); + rsOut = rsOut.add(replaced); + + if( makeChangeSet ) { + assert forChangeSet != 
null; + + if( forChangeSet.get(this) == null ) { + forChangeSet.put(this, new HashSet() ); + } + + forChangeSet.get(this).add(replaced); + } + } + } + + return rsOut.makeCanonical(); + } + + + public ReachState makeArityZeroOrMore() { + ReachState ttsOut = new ReachTupleSet().makeCanonical(); + + Iterator itrThis = this.iterator(); + while( itrThis.hasNext() ) { + ReachTuple tt = itrThis.next(); + + ttsOut = ttsOut.union(new ReachTuple(tt.getToken(), + tt.isMultiObject(), + ReachTuple.ARITY_ZEROORMORE + ).makeCanonical() + ); + } + + return ttsOut.makeCanonical(); + } + + public String toString() { + return tokenTuples.toString(); + } +} diff --git a/Robust/src/Analysis/Disjoint/ReachTuple.java b/Robust/src/Analysis/Disjoint/ReachTuple.java new file mode 100644 index 00000000..bcca2918 --- /dev/null +++ b/Robust/src/Analysis/Disjoint/ReachTuple.java @@ -0,0 +1,144 @@ +package Analysis.DisjointAnalysis; + +import IR.*; +import IR.Flat.*; +import java.util.*; +import java.io.*; + + +// a token touple is a pair that indicates a +// heap region node and an arity + +// THIS CLASS IS IMMUTABLE! + +public class ReachTuple extends Canonical { + + private Integer token; + private boolean isMultiObject; + + public static final int ARITY_ZEROORMORE = 0; + public static final int ARITY_ONE = 1; + public static final int ARITY_ONEORMORE = 2; + private int arity; + + + public ReachTuple(HeapRegionNode hrn) { + assert hrn != null; + + token = hrn.getID(); + isMultiObject = !hrn.isSingleObject(); + arity = ARITY_ONE; + fixStuff(); + } + + public ReachTuple(Integer token, + boolean isMultiObject, + int arity) { + assert token != null; + + this.token = token; + this.isMultiObject = isMultiObject; + this.arity = arity; + fixStuff(); + } + + private void fixStuff() { + //This is an evil hack...we should fix this stuff elsewhere... 
+ if (!isMultiObject) { + arity=ARITY_ONE; + } else { + if (arity==ARITY_ONEORMORE) + arity=ARITY_ZEROORMORE; + } + } + + + public ReachTuple makeCanonical() { + return (ReachTuple) Canonical.makeCanonical(this); + } + + + public Integer getToken() { + return token; + } + + public boolean isMultiObject() { + return isMultiObject; + } + + public int getArity() { + return arity; + } + + + public ReachTuple unionArity(ReachTuple tt) { + assert tt != null; + assert token == tt.token; + assert isMultiObject == tt.isMultiObject; + + if( isMultiObject ) { + // for multiple objects only zero-or-mores combined are still zero-or-more + // when two tokens are present (absence of a token is arity=zero and is + // handled outside of this method) + if( arity == ARITY_ZEROORMORE && tt.arity == ARITY_ZEROORMORE ) { + return new ReachTuple(token, true, ARITY_ZEROORMORE).makeCanonical(); + } else { + return new ReachTuple(token, true, ARITY_ONEORMORE).makeCanonical(); + } + + } else { + // a single object region's token can only have ZEROORMORE or ONE + if( arity == ARITY_ZEROORMORE && tt.arity == ARITY_ZEROORMORE ) { + return new ReachTuple(token, false, ARITY_ZEROORMORE).makeCanonical(); + } else { + return new ReachTuple(token, false, ARITY_ONE).makeCanonical(); + } + } + } + + + public ReachTuple changeTokenTo(Integer tokenToChangeTo) { + assert tokenToChangeTo != null; + + return new ReachTuple(tokenToChangeTo, + isMultiObject, + arity).makeCanonical(); + } + + + public boolean equals(Object o) { + if( o == null ) { + return false; + } + + if( !(o instanceof ReachTuple) ) { + return false; + } + + ReachTuple tt = (ReachTuple) o; + + return token.equals(tt.getToken() ) && + arity == tt.getArity(); + } + + public int hashCode() { + return (token.intValue() << 2) ^ arity; + } + + + public String toString() { + String s = token.toString(); + + if( isMultiObject ) { + s += "M"; + } + + if( arity == ARITY_ZEROORMORE ) { + s += "*"; + } else if( arity == ARITY_ONEORMORE ) { + s += 
"+"; + } + + return s; + } +} diff --git a/Robust/src/Analysis/Disjoint/RefEdge.java b/Robust/src/Analysis/Disjoint/RefEdge.java new file mode 100644 index 00000000..9f061912 --- /dev/null +++ b/Robust/src/Analysis/Disjoint/RefEdge.java @@ -0,0 +1,274 @@ +package Analysis.DisjointAnalysis; + +import IR.*; +import IR.Flat.*; + +public class RefEdge { + + // all edges should have a non-null + // TypeDescriptor now + protected TypeDescriptor type; + + // the field name may be null if this + // edge models a variable reference + protected String field; + + protected boolean isInitialParam; + + protected ReachSet beta; + protected ReachSet betaNew; + + protected ReferenceSourceNode src; + protected HeapRegionNode dst; + + + public RefEdge( ReferenceSourceNode src, + HeapRegionNode dst, + TypeDescriptor type, + String field, + boolean isInitialParam, + ReachSet beta ) { + assert type != null; + + this.src = src; + this.dst = dst; + this.type = type; + this.field = field; + this.isInitialParam = isInitialParam; + + if( beta != null ) { + this.beta = beta; + } else { + this.beta = new ReachSet().makeCanonical(); + } + + // when edges are not undergoing an operation that + // is changing beta info, betaNew is always empty + betaNew = new ReachSet().makeCanonical(); + } + + + public RefEdge copy() { + RefEdge copy = new RefEdge( src, + dst, + type, + field, + isInitialParam, + beta ); + return copy; + } + + + public boolean equals( Object o ) { + if( o == null ) { + return false; + } + + if( !(o instanceof RefEdge) ) { + return false; + } + + RefEdge edge = (RefEdge) o; + + if( !typeEquals( edge.type ) ) { + return false; + } + + if( !fieldEquals( edge.field ) ) { + return false; + } + + // Equality of edges is only valid within a graph, so + // compare src and dst by reference + if( !(src == edge.src) || + !(dst == edge.dst) ) { + return false; + } + + return true; + } + + + public boolean equalsIncludingBeta( RefEdge edge ) { + return equals( edge ) && beta.equals( 
edge.beta ); + } + + + public int hashCode() { + int hash = 0; + + hash += type.hashCode()*17; + + if( field != null ) { + hash += field.hashCode()*7; + } + + hash += src.hashCode()*11; + hash += dst.hashCode(); + + return hash; + } + + + public ReferenceSourceNode getSrc() { + return src; + } + + public void setSrc( ReferenceSourceNode rsn ) { + assert rsn != null; + src = rsn; + } + + public HeapRegionNode getDst() { + return dst; + } + + public void setDst( HeapRegionNode hrn ) { + assert hrn != null; + dst = hrn; + } + + + public TypeDescriptor getType() { + return type; + } + + public void setType( TypeDescriptor td ) { + assert td != null; + type = td; + } + + public String getField() { + return field; + } + + public void setField( String s ) { + field = s; + } + + + public boolean typeEquals( TypeDescriptor td ) { + return type.equals( td ); + } + + public boolean fieldEquals( String s ) { + if( field == null && s == null ) { + return true; + } + if( field == null ) { + return false; + } + return field.equals( s ); + } + + public boolean typeAndFieldEquals( RefEdge e ) { + return typeEquals ( e.getType() ) && + fieldEquals( e.getField() ); + } + + + public boolean isInitialParam() { + return isInitialParam; + } + + public void setIsInitialParam( boolean isInitialParam ) { + this.isInitialParam = isInitialParam; + } + + + public ReachSet getBeta() { + return beta; + } + + public void setBeta( ReachSet beta ) { + assert beta != null; + this.beta = beta; + } + + public ReachSet getBetaNew() { + return betaNew; + } + + public void setBetaNew( ReachSet beta ) { + assert beta != null; + this.betaNew = beta; + } + + public void applyBetaNew() { + assert betaNew != null; + + beta = betaNew; + betaNew = new ReachSet().makeCanonical(); + } + + + public String toGraphEdgeString( boolean hideSubsetReachability, + boolean hideEdgeTaints ) { + String edgeLabel = ""; + + if (type != null) { + edgeLabel += type.toPrettyString() + "\\n"; + } + + if (field != null) { + 
edgeLabel += field + "\\n"; + } + + if (isInitialParam) { + edgeLabel += "*init*\\n"; + } + + if( !hideEdgeTaints ) { + edgeLabel += "*taint*=" + Integer.toBinaryString(taintIdentifier) + + "\\n*SESE*=" + Integer.toBinaryString(SESEtaintIdentifier) + + "\\n"; + } + + edgeLabel += beta.toStringEscapeNewline(hideSubsetReachability); + + return edgeLabel; + } + + public String toString() { + if( type != null ) { + return new String("("+src+"->"+type.toPrettyString()+" "+field+"->"+dst+")"); + } + + return new String("("+src+"->"+type+" "+field+"->"+dst+")"); + } + + public void tainedBy(Integer paramIdx){ + int newTaint=(int) Math.pow(2, paramIdx.intValue()); + taintIdentifier=taintIdentifier | newTaint; + } + + public void setTaintIdentifier(int newTaint){ + taintIdentifier=newTaint; + } + + public void unionTaintIdentifier(int newTaint){ + taintIdentifier=taintIdentifier | newTaint; + } + + public void minusTaintIdentifier(int removedTaint){ + taintIdentifier = taintIdentifier & (~removedTaint); + } + + public int getTaintIdentifier(){ + return taintIdentifier; + } + + public int getSESETaintIdentifier(){ + return SESEtaintIdentifier; + } + + public void setSESETaintIdentifier(int newTaint){ + SESEtaintIdentifier=newTaint; + } + + public void unionSESETaintIdentifier(int newTaint){ + SESEtaintIdentifier=SESEtaintIdentifier | newTaint; + } + + +} diff --git a/Robust/src/Analysis/Disjoint/RefSrcNode.java b/Robust/src/Analysis/Disjoint/RefSrcNode.java new file mode 100644 index 00000000..c4e88bda --- /dev/null +++ b/Robust/src/Analysis/Disjoint/RefSrcNode.java @@ -0,0 +1,57 @@ +package Analysis.DisjointAnalysis; + +import IR.*; +import IR.Flat.*; +import java.util.*; + +public abstract class ReferenceSourceNode { + + protected HashSet referencees; + + public ReferenceSourceNode() { + referencees = new HashSet(); + } + + + public Iterator iteratorToReferencees() { + return referencees.iterator(); + } + + public Iterator iteratorToReferenceesClone() { + HashSet clone = 
(HashSet)referencees.clone(); + return clone.iterator(); + } + + public int getNumReferencees() { + return referencees.size(); + } + + public void addReferencee( RefEdge edge ) { + assert edge != null; + referencees.add( edge ); + } + + public void removeReferencee( RefEdge edge ) { + assert edge != null; + assert referencees.contains( edge ); + referencees.remove( edge ); + } + + public RefEdge getReferenceTo(HeapRegionNode hrn, + TypeDescriptor type, + String field) { + assert hrn != null; + + Iterator itrEdge = referencees.iterator(); + while( itrEdge.hasNext() ) { + RefEdge edge = itrEdge.next(); + if( edge.getDst().equals(hrn) && + edge.typeEquals( type ) && + edge.fieldEquals( field ) ) { + return edge; + } + } + + return null; + } +} \ No newline at end of file diff --git a/Robust/src/Analysis/Disjoint/VariableNode.java b/Robust/src/Analysis/Disjoint/VariableNode.java new file mode 100644 index 00000000..653908a4 --- /dev/null +++ b/Robust/src/Analysis/Disjoint/VariableNode.java @@ -0,0 +1,43 @@ +package Analysis.DisjointAnalysis; + +import IR.*; +import IR.Flat.*; +import java.util.*; + +public class VariableNode extends RefSrcNode { + protected TempDescriptor td; + + public VariableNode(TempDescriptor td) { + this.td = td; + } + + public TempDescriptor getTempDescriptor() { + return td; + } + + public boolean equals(Object o) { + if( o == null ) { + return false; + } + + if( !( o instanceof VariableNode) ) { + return false; + } + + VariableNode ln = (VariableNode) o; + + return td == ln.getTempDescriptor(); + } + + public int hashCode() { + return td.getNum(); + } + + public String getTempDescriptorString() { + return td.toString(); + } + + public String toString() { + return "LN_"+getTempDescriptorString(); + } +} diff --git a/Robust/src/IR/State.java b/Robust/src/IR/State.java index f69247d1..bc7e742b 100644 --- a/Robust/src/IR/State.java +++ b/Robust/src/IR/State.java @@ -69,6 +69,15 @@ public class State { public int OWNERSHIPDEBUGCALLCOUNT=0; public 
String OWNERSHIPDEBUGCALLEE=null; public String OWNERSHIPDEBUGCALLER=null; + public boolean DISJOINT=false; + public int DISJOINTALLOCDEPTH=3; + public boolean DISJOINTWRITEDOTS=false; + public boolean DISJOINTWRITEALL=false; + public String DISJOINTALIASFILE=null; + public boolean DISJOINTALIASTAB=false; + public int DISJOINTDEBUGCALLCOUNT=0; + public String DISJOINTDEBUGCALLEE=null; + public String DISJOINTDEBUGCALLER=null; public boolean OPTIONAL=false; public boolean ARRAYBOUNDARYCHECK=true; public boolean RAW=false; diff --git a/Robust/src/Main/Main.java b/Robust/src/Main/Main.java index 4a3cc7c3..1487ff8a 100644 --- a/Robust/src/Main/Main.java +++ b/Robust/src/Main/Main.java @@ -41,6 +41,7 @@ import Analysis.Locality.GenerateConversions; import Analysis.Prefetch.PrefetchAnalysis; import Analysis.FlatIRGraph.FlatIRGraph; import Analysis.OwnershipAnalysis.OwnershipAnalysis; +import Analysis.Disjoint.DisjointAnalysis; import Analysis.MLP.MLPAnalysis; import Analysis.Loops.*; import Analysis.Liveness; @@ -159,6 +160,35 @@ public class Main { } else if (option.equals("-owndebugcallcount")) { state.OWNERSHIPDEBUGCALLCOUNT=Integer.parseInt(args[++i]); } + else if (option.equals("-disjoint")) + state.DISJOINT=true; + else if (option.equals("-disjoint-k")) { + state.DISJOINTALLOCDEPTH=Integer.parseInt(args[++i]); + } else if (option.equals("-disjoint-write-dots")) { + state.DISJOINTWRITEDOTS = true; + String arg = args[++i]; + if( arg.equals("all") ) { + state.DISJOINTWRITEALL = true; + } else if( arg.equals("final") ) { + state.DISJOINTWRITEALL = false; + } else { + throw new Error("disjoint-write-dots requires argument "); + } + } else if (option.equals("-disjoint-alias-file")) { + state.DISJOINTALIASFILE = args[++i]; + String arg = args[++i]; + if( arg.equals("normal") ) { + state.DISJOINTALIASTAB = false; + } else if( arg.equals("tabbed") ) { + state.DISJOINTALIASTAB = true; + } else { + throw new Error("disjoint-alias-file requires arguments "); + } + } else if 
(option.equals("-disjoint-debug-callsite")) { + state.DISJOINTDEBUGCALLEE=args[++i]; + state.DISJOINTDEBUGCALLER=args[++i]; + state.DISJOINTDEBUGCALLCOUNT=Integer.parseInt(args[++i]); + } else if (option.equals("-optional")) state.OPTIONAL=true; else if (option.equals("-optimize")) @@ -387,6 +417,13 @@ public class Main { oa); } + if (state.DISJOINT) { + CallGraph cg = new CallGraph(state); + Liveness l = new Liveness(); + ArrayReferencees ar = new ArrayReferencees(state); + DisjointAnalysis oa = new DisjointAnalysis(state, tu, cg, l, ar); + } + if (state.TAGSTATE) { CallGraph callgraph=new CallGraph(state); TagAnalysis taganalysis=new TagAnalysis(state, callgraph); diff --git a/Robust/src/Makefile b/Robust/src/Makefile index b51c20b8..c6fc2c71 100644 --- a/Robust/src/Makefile +++ b/Robust/src/Makefile @@ -89,6 +89,8 @@ Analysis/OwnershipAnalysis/Canonical.class \ Analysis/OwnershipAnalysis/MethodContext.class \ Analysis/OwnershipAnalysis/ParameterDecomposition.class \ Analysis/OwnershipAnalysis/AccessPath.class \ +Analysis/Disjoint/DisjointAnalysis.class \ +Analysis/Disjoint/ReachGraph.class \ Analysis/MLP/MLPAnalysis.class \ Analysis/MLP/VariableSourceToken.class \ Analysis/MLP/SVKey.class \ @@ -126,6 +128,7 @@ JAVAFILES=IR/*.java \ Analysis/Loops/*.java \ Analysis/Locality/*.java \ Analysis/OwnershipAnalysis/*.java \ + Analysis/Disjoint/*.java \ Analysis/MLP/*.java \ Analysis/Prefetch/*.java \ Analysis/Scheduling/*.java \ @@ -177,13 +180,13 @@ mytabbing: javadoc: mkdir javadoc - javadoc -classpath ../cup:.:$(CLASSPATH) -sourcepath . -private -d javadoc Lex Util IR IR.Tree IR.Flat Analysis Analysis.CallGraph Analysis.Flag Analysis.TaskStateAnalysis Analysis.Locality Analysis.Prefetch Main Analysis.OwnershipAnalysis Analysis.MLP Analysis.Scheduling + javadoc -classpath ../cup:.:$(CLASSPATH) -sourcepath . 
-private -d javadoc Lex Util IR IR.Tree IR.Flat Analysis Analysis.CallGraph Analysis.Flag Analysis.TaskStateAnalysis Analysis.Locality Analysis.Prefetch Main Analysis.OwnershipAnalysis Analysis.Disjoint Analysis.MLP Analysis.Scheduling clean: - rm -f IR/*.class IR/Tree/*.class Main/*.class Lex/*.class Parse/*.class Parse/Sym.java Parse/Parser.java IR/Flat/*.class classdefs.h methodheaders.h methods.c structdefs.h virtualtable.h task.h taskdefs.c taskdefs.h Analysis/*.class Analysis/Flag/*.class Analysis/CallGraph/*.class Analysis/TaskStateAnalysis/*.class Interface/*.class Util/*.class Analysis/Locality/*.class Analysis/Prefetch/*.class Analysis/FlatIRGraph/*.class Analysis/OwnershipAnalysis/*.class Analysis/MLP/*.class Analysis/Scheduling/*.class Analysis/Loops/*.class + rm -f IR/*.class IR/Tree/*.class Main/*.class Lex/*.class Parse/*.class Parse/Sym.java Parse/Parser.java IR/Flat/*.class classdefs.h methodheaders.h methods.c structdefs.h virtualtable.h task.h taskdefs.c taskdefs.h Analysis/*.class Analysis/Flag/*.class Analysis/CallGraph/*.class Analysis/TaskStateAnalysis/*.class Interface/*.class Util/*.class Analysis/Locality/*.class Analysis/Prefetch/*.class Analysis/FlatIRGraph/*.class Analysis/OwnershipAnalysis/*.class Analysis/Disjoint/*.class Analysis/MLP/*.class Analysis/Scheduling/*.class Analysis/Loops/*.class cleanclass: - rm -f IR/*.class IR/Tree/*.class Main/*.class IR/Flat/*.class Analysis/*.class Analysis/Flag/*.class Analysis/CallGraph/*.class Analysis/TaskStateAnalysis/*.class Interface/*.class Util/*.class Analysis/Locality/*.class Analysis/Prefetch/*.class Analysis/FlatIRGraph/*.class Analysis/OwnershipAnalysis/*.class Analysis/MLP/*.class Analysis/Scheduling/*.class Analysis/Loops/*.class + rm -f IR/*.class IR/Tree/*.class Main/*.class IR/Flat/*.class Analysis/*.class Analysis/Flag/*.class Analysis/CallGraph/*.class Analysis/TaskStateAnalysis/*.class Interface/*.class Util/*.class Analysis/Locality/*.class Analysis/Prefetch/*.class 
Analysis/FlatIRGraph/*.class Analysis/OwnershipAnalysis/*.class Analysis/Disjoint/*.class Analysis/MLP/*.class Analysis/Scheduling/*.class Analysis/Loops/*.class cleandoc: rm -rf javadoc diff --git a/Robust/src/Tests/disjoint/simple/makefile b/Robust/src/Tests/disjoint/simple/makefile new file mode 100644 index 00000000..dcb012af --- /dev/null +++ b/Robust/src/Tests/disjoint/simple/makefile @@ -0,0 +1,27 @@ +PROGRAM=test + +SOURCE_FILES=$(PROGRAM).java + +BUILDSCRIPT=~/research/Robust/src/buildscript +BSFLAGS= -mainclass Test -justanalyze -disjoint -disjoint-k 1 -disjoint-write-dots final -disjoint-alias-file aliases.txt normal -enable-assertions + +all: $(PROGRAM).bin + +view: PNGs + eog *.png & + +PNGs: DOTs + d2p *COMPLETE*.dot + +DOTs: $(PROGRAM).bin + +$(PROGRAM).bin: $(SOURCE_FILES) + $(BUILDSCRIPT) $(BSFLAGS) -o $(PROGRAM) $(SOURCE_FILES) + +clean: + rm -f $(PROGRAM).bin + rm -fr tmpbuilddirectory + rm -f *~ + rm -f *.dot + rm -f *.png + rm -f aliases.txt diff --git a/Robust/src/Tests/disjoint/simple/test.java b/Robust/src/Tests/disjoint/simple/test.java new file mode 100644 index 00000000..3d271637 --- /dev/null +++ b/Robust/src/Tests/disjoint/simple/test.java @@ -0,0 +1,8 @@ +public class Test { + static public void main( String[] args ) { + f1(); + } + + static public void f1() { + } +} -- 2.34.1