1 package Analysis.Disjoint;
3 import Analysis.CallGraph.*;
4 import Analysis.Liveness;
5 import Analysis.ArrayReferencees;
8 import IR.Tree.Modifiers;
13 public class DisjointAnalysis {
15 ///////////////////////////////////////////
17 // Public interface to discover possible
18 // aliases in the program under analysis
20 ///////////////////////////////////////////
// Returns the set of flagged allocation sites reachable from the
// given task; delegates to the PRIVATE variant after verifying the
// interprocedural analysis has completed.
22 public HashSet<AllocSite>
23 getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
24 checkAnalysisComplete();
25 return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
// Maps a FlatNew IR node to its analysis-abstraction AllocSite.
// Only valid after the analysis has completed.
28 public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
29 checkAnalysisComplete();
30 return getAllocSiteFromFlatNewPRIVATE(fn);
// Looks up the AllocSite that created the heap region node with the
// given intergraph ID; returns null if the ID is unknown (plain
// Hashtable.get — no explicit missing-key handling here).
33 public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
34 checkAnalysisComplete();
35 return mapHrnIdToAllocSite.get(id);
// Parameter-vs-parameter sharing query: returns heap region nodes
// potentially shared between two parameters of a task or method.
// NOTE(review): rg is used without a null check — assumes the
// descriptor was analyzed (reachable from main); confirm callers
// guarantee this.
38 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
41 checkAnalysisComplete();
42 ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
43 FlatMethod fm=state.getMethodFlat(taskOrMethod);
45 return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
// Parameter-vs-allocation-site sharing query for a task or method.
48 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
49 int paramIndex, AllocSite alloc) {
50 checkAnalysisComplete();
51 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
52 FlatMethod fm=state.getMethodFlat(taskOrMethod);
54 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Allocation-site-vs-parameter sharing query. Convenience overload:
// despite the reversed signature order it forwards the arguments in
// the same (paramIndex, alloc) order as the overload above, so the
// two are symmetric.
57 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
58 AllocSite alloc, int paramIndex) {
59 checkAnalysisComplete();
60 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
61 FlatMethod fm=state.getMethodFlat(taskOrMethod);
63 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Allocation-site-vs-allocation-site sharing query; the FlatMethod
// is not needed for this variant.
66 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
67 AllocSite alloc1, AllocSite alloc2) {
68 checkAnalysisComplete();
69 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
71 return rg.mayReachSharedObjects(alloc1, alloc2);
// Builds a human-readable, multi-line listing of a set of heap
// region nodes; nodes with an AllocSite get the verbose site string
// appended. NOTE(review): the declaration/initialization of 'out'
// and the loop braces fall in gaps of this view — confirm against
// the full source.
74 public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
75 checkAnalysisComplete();
79 Iterator<HeapRegionNode> i = s.iterator();
81 HeapRegionNode n = i.next();
83 AllocSite as = n.getAllocSite();
85 out += " " + n.toString() + ",\n";
87 out += " " + n.toString() + ": " + as.toStringVerbose()
96 // use the methods given above to check every possible sharing class
97 // between task parameters and flagged allocation sites reachable
// Writes a full sharing report for a task-based (Bamboo) program:
// for every task, checks parameter-vs-parameter sharing,
// parameter-vs-reachable-allocation-site sharing, and
// site-vs-site sharing, writing findings to outputFile.
// tabularOutput suppresses the prose and emits a LaTeX table row
// instead. NOTE(review): this view is gapped (e.g. timeReport /
// numSharing / numLines declarations and several closing braces are
// not visible); comments below describe only what is shown.
99 public void writeAllSharing(String outputFile,
102 boolean tabularOutput,
105 throws java.io.IOException {
106 checkAnalysisComplete();
108 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
110 if (!tabularOutput) {
111 bw.write("Conducting ownership analysis with allocation depth = "
112 + allocationDepth + "\n");
113 bw.write(timeReport + "\n");
118 // look through every task for potential sharing
119 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
120 while (taskItr.hasNext()) {
121 TaskDescriptor td = (TaskDescriptor) taskItr.next();
123 if (!tabularOutput) {
124 bw.write("\n---------" + td + "--------\n");
127 HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);
129 Set<HeapRegionNode> common;
131 // for each task parameter, check for sharing classes with
132 // other task parameters and every allocation site
133 // reachable from this task
134 boolean foundSomeSharing = false;
136 FlatMethod fm = state.getMethodFlat(td);
137 for (int i = 0; i < fm.numParameters(); ++i) {
139 // skip parameters with types that cannot reference
141 if( !shouldAnalysisTrack( fm.getParameter( i ).getType() ) ) {
145 // for the ith parameter check for sharing classes to all
146 // higher numbered parameters
147 for (int j = i + 1; j < fm.numParameters(); ++j) {
149 // skip parameters with types that cannot reference
151 if( !shouldAnalysisTrack( fm.getParameter( j ).getType() ) ) {
156 common = hasPotentialSharing(td, i, j);
157 if (!common.isEmpty()) {
158 foundSomeSharing = true;
160 if (!tabularOutput) {
161 bw.write("Potential sharing between parameters " + i
162 + " and " + j + ".\n");
163 bw.write(prettyPrintNodeSet(common) + "\n");
168 // for the ith parameter, check for sharing classes against
169 // the set of allocation sites reachable from this
171 Iterator allocItr = allocSites.iterator();
172 while (allocItr.hasNext()) {
173 AllocSite as = (AllocSite) allocItr.next();
174 common = hasPotentialSharing(td, i, as);
175 if (!common.isEmpty()) {
176 foundSomeSharing = true;
178 if (!tabularOutput) {
179 bw.write("Potential sharing between parameter " + i
180 + " and " + as.getFlatNew() + ".\n");
181 bw.write(prettyPrintNodeSet(common) + "\n");
187 // for each allocation site check for sharing classes with
188 // other allocation sites in the context of execution
190 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
191 Iterator allocItr1 = allocSites.iterator();
192 while (allocItr1.hasNext()) {
193 AllocSite as1 = (AllocSite) allocItr1.next();
195 Iterator allocItr2 = allocSites.iterator();
196 while (allocItr2.hasNext()) {
197 AllocSite as2 = (AllocSite) allocItr2.next();
// outerChecked marks completed outer sites so each unordered
// pair is tested only once
199 if (!outerChecked.contains(as2)) {
200 common = hasPotentialSharing(td, as1, as2);
202 if (!common.isEmpty()) {
203 foundSomeSharing = true;
205 if (!tabularOutput) {
206 bw.write("Potential sharing between "
207 + as1.getFlatNew() + " and "
208 + as2.getFlatNew() + ".\n");
209 bw.write(prettyPrintNodeSet(common) + "\n");
215 outerChecked.add(as1);
218 if (!foundSomeSharing) {
219 if (!tabularOutput) {
220 bw.write("No sharing between flagged objects in Task " + td
// tabular mode: emit one LaTeX table row of summary statistics
228 bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
229 + " & " + numMethodsAnalyzed() + " \\\\\n");
231 bw.write("\nNumber sharing classes: "+numSharing);
237 // this version of writeAllSharing is for Java programs that have no tasks
// Sharing report for plain Java programs (no tasks): checks every
// unordered pair of flagged allocation sites reachable from main
// and writes findings to outputFile.
// NOTE(review): gapped view — timeReport/numSharing declarations
// and the second argument group of hasPotentialSharing (original
// lines 276-277) are not visible here.
238 public void writeAllSharingJava(String outputFile,
241 boolean tabularOutput,
244 throws java.io.IOException {
245 checkAnalysisComplete();
251 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
253 bw.write("Conducting disjoint reachability analysis with allocation depth = "
254 + allocationDepth + "\n");
255 bw.write(timeReport + "\n\n");
257 boolean foundSomeSharing = false;
// anchor the query at the source program's main method
259 Descriptor d = typeUtil.getMain();
260 HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);
262 // for each allocation site check for sharing classes with
263 // other allocation sites in the context of execution
265 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
266 Iterator allocItr1 = allocSites.iterator();
267 while (allocItr1.hasNext()) {
268 AllocSite as1 = (AllocSite) allocItr1.next();
270 Iterator allocItr2 = allocSites.iterator();
271 while (allocItr2.hasNext()) {
272 AllocSite as2 = (AllocSite) allocItr2.next();
// outerChecked ensures each unordered pair is tested once
274 if (!outerChecked.contains(as2)) {
275 Set<HeapRegionNode> common = hasPotentialSharing(d,
278 if (!common.isEmpty()) {
279 foundSomeSharing = true;
280 bw.write("Potential sharing between "
281 + as1.getDisjointAnalysisId() + " and "
282 + as2.getDisjointAnalysisId() + ".\n");
283 bw.write(prettyPrintNodeSet(common) + "\n");
289 outerChecked.add(as1);
292 if (!foundSomeSharing) {
293 bw.write("No sharing classes between flagged objects found.\n");
295 bw.write("\nNumber sharing classes: "+numSharing);
298 bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
303 ///////////////////////////////////////////
305 // end public interface
307 ///////////////////////////////////////////
// Guard used by every public-interface method: fails hard if a
// result is requested before the fixed point has been reached.
// NOTE(review): message says "Warning" but this throws Error, i.e.
// it is fatal, not advisory — consider rewording in full source.
309 protected void checkAnalysisComplete() {
310 if( !analysisComplete ) {
311 throw new Error("Warning: public interface method called while analysis is running.");
316 // run in faster mode, only when bugs wrung out!
317 public static boolean releaseMode;
319 // data from the compiler
// call graph, liveness and array-referencee results, type utility,
// and the configured allocation-site summarization depth, all
// injected via init(...)
321 public CallGraph callGraph;
322 public Liveness liveness;
323 public ArrayReferencees arrayReferencees;
324 public TypeUtil typeUtil;
325 public int allocationDepth;
327 // data structure for public interface
328 private Hashtable<Descriptor, HashSet<AllocSite> > mapDescriptorToAllocSiteSet;
331 // for public interface methods to warn that they
332 // are grabbing results during analysis
333 private boolean analysisComplete;
336 // used to identify HeapRegionNode objects
337 // A unique ID equates an object in one
338 // ownership graph with an object in another
339 // graph that logically represents the same
341 // start at 10 and increment to reserve some
342 // IDs for special purposes
343 static protected int uniqueIDcount = 10;
346 // An out-of-scope method created by the
347 // analysis that has no parameters, and
348 // appears to allocate the command line
349 // arguments, then invoke the source code's
350 // main method. The purpose of this is to
351 // provide the analysis with an explicit
352 // top-level context with no parameters
353 protected MethodDescriptor mdAnalysisEntry;
354 protected FlatMethod fmAnalysisEntry;
356 // main method defined by source program
357 protected MethodDescriptor mdSourceEntry;
359 // the set of task and/or method descriptors
360 // reachable in call graph
361 protected Set<Descriptor>
362 descriptorsToAnalyze;
364 // current descriptors to visit in fixed-point
365 // interprocedural analysis, prioritized by
366 // dependency in the call graph
367 protected Stack<DescriptorQWrapper>
368 descriptorsToVisitStack;
369 protected PriorityQueue<DescriptorQWrapper>
372 // a duplication of the above structure, but
373 // for efficient testing of inclusion
374 protected HashSet<Descriptor>
375 descriptorsToVisitSet;
377 // storage for priorities (doesn't make sense)
378 // to add it to the Descriptor class, just in
380 protected Hashtable<Descriptor, Integer>
381 mapDescriptorToPriority;
383 // when analyzing a method and scheduling more:
384 // remember set of callee's enqueued for analysis
385 // so they can be put on top of the callers in
386 // the stack-visit mode
387 protected Set<Descriptor>
391 // maps a descriptor to its current partial result
392 // from the intraprocedural fixed-point analysis--
393 // then the interprocedural analysis settles, this
394 // mapping will have the final results for each
396 protected Hashtable<Descriptor, ReachGraph>
397 mapDescriptorToCompleteReachGraph;
399 // maps a descriptor to its known dependents: namely
400 // methods or tasks that call the descriptor's method
401 // AND are part of this analysis (reachable from main)
402 protected Hashtable< Descriptor, Set<Descriptor> >
403 mapDescriptorToSetDependents;
405 // maps each flat new to one analysis abstraction
406 // allocate site object, these exist outside reach graphs
407 protected Hashtable<FlatNew, AllocSite>
408 mapFlatNewToAllocSite;
410 // maps intergraph heap region IDs to intergraph
411 // allocation sites that created them, a redundant
412 // structure for efficiency in some operations
413 protected Hashtable<Integer, AllocSite>
416 // maps a method to its initial heap model (IHM) that
417 // is the set of reachability graphs from every caller
418 // site, all merged together. The reason that we keep
419 // them separate is that any one call site's contribution
420 // to the IHM may changed along the path to the fixed point
421 protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
422 mapDescriptorToIHMcontributions;
424 // additionally, keep a mapping from descriptors to the
425 // merged in-coming initial context, because we want this
426 // initial context to be STRICTLY MONOTONIC
427 protected Hashtable<Descriptor, ReachGraph>
428 mapDescriptorToInitialContext;
430 // make the result for back edges analysis-wide STRICTLY
431 // MONOTONIC as well, but notice we use FlatNode as the
432 // key for this map: in case we want to consider other
433 // nodes as back edge's in future implementations
434 protected Hashtable<FlatNode, ReachGraph>
435 mapBackEdgeToMonotone;
438 public static final String arrayElementFieldName = "___element_";
439 static protected Hashtable<TypeDescriptor, FieldDescriptor>
442 // for controlling DOT file output
443 protected boolean writeFinalDOTs;
444 protected boolean writeAllIncrementalDOTs;
446 // supporting DOT output--when we want to write every
447 // partial method result, keep a tally for generating
449 protected Hashtable<Descriptor, Integer>
450 mapDescriptorToNumUpdates;
452 //map task descriptor to initial task parameter
453 protected Hashtable<Descriptor, ReachGraph>
454 mapDescriptorToReachGraph;
// helper that precomputes predecessor/successor relations over the
// flat IR (see pm.analyzeMethod / numPrev / getNext usages below)
456 protected PointerMethod pm;
// analysis-wide cache of the last fixed-point graph per flat node,
// used only for the monotonicity assertion in analyzeMethod
458 static protected Hashtable<FlatNode, ReachGraph> fn2rg =
459 new Hashtable<FlatNode, ReachGraph>();
462 // allocate various structures that are not local
463 // to a single class method--should be done once
462 // allocate various structures that are not local
463 // to a single class method--should be done once
464 protected void allocateStructures() {
465 descriptorsToAnalyze = new HashSet<Descriptor>();
467 mapDescriptorToCompleteReachGraph =
468 new Hashtable<Descriptor, ReachGraph>();
470 mapDescriptorToNumUpdates =
471 new Hashtable<Descriptor, Integer>();
473 mapDescriptorToSetDependents =
474 new Hashtable< Descriptor, Set<Descriptor> >();
476 mapFlatNewToAllocSite =
477 new Hashtable<FlatNew, AllocSite>();
479 mapDescriptorToIHMcontributions =
480 new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();
482 mapDescriptorToInitialContext =
483 new Hashtable<Descriptor, ReachGraph>();
485 mapBackEdgeToMonotone =
486 new Hashtable<FlatNode, ReachGraph>();
488 mapHrnIdToAllocSite =
489 new Hashtable<Integer, AllocSite>();
491 mapTypeToArrayField =
492 new Hashtable <TypeDescriptor, FieldDescriptor>();
// the visit worklist is allocated only for the configured
// scheduling mode (stack, stack-with-callees-on-top, or p-queue)
494 if( state.DISJOINTDVISITSTACK ||
495 state.DISJOINTDVISITSTACKEESONTOP
497 descriptorsToVisitStack =
498 new Stack<DescriptorQWrapper>();
501 if( state.DISJOINTDVISITPQUE ) {
502 descriptorsToVisitQ =
503 new PriorityQueue<DescriptorQWrapper>();
506 descriptorsToVisitSet =
507 new HashSet<Descriptor>();
509 mapDescriptorToPriority =
510 new Hashtable<Descriptor, Integer>();
// NOTE(review): the left-hand side of this allocation (original
// line 512) is not visible in this view — presumably
// calleesToEnqueue; confirm against the full source.
513 new HashSet<Descriptor>();
515 mapDescriptorToAllocSiteSet =
516 new Hashtable<Descriptor, HashSet<AllocSite> >();
518 mapDescriptorToReachGraph =
519 new Hashtable<Descriptor, ReachGraph>();
524 // this analysis generates a disjoint reachability
525 // graph for every reachable method in the program
524 // this analysis generates a disjoint reachability
525 // graph for every reachable method in the program
// Constructor simply forwards the compiler-supplied State,
// TypeUtil, CallGraph, Liveness and ArrayReferencees to init(),
// which runs the entire analysis eagerly.
526 public DisjointAnalysis( State s,
531 ) throws java.io.IOException {
532 init( s, tu, cg, l, ar );
// Stores compiler inputs, configures debug-snapshot options,
// allocates shared structures, runs the interprocedural fixed
// point, then emits any requested DOT / sharing-report output.
// NOTE(review): several signature parameters and statements fall in
// gaps of this view (e.g. the typeUtil/callGraph/liveness params,
// the call that starts the fixed point before 'analysisComplete');
// comments below cover only visible code.
535 protected void init( State state,
539 ArrayReferencees arrayReferencees
540 ) throws java.io.IOException {
542 analysisComplete = false;
545 this.typeUtil = typeUtil;
546 this.callGraph = callGraph;
547 this.liveness = liveness;
548 this.arrayReferencees = arrayReferencees;
549 this.allocationDepth = state.DISJOINTALLOCDEPTH;
// NOTE(review): releaseMode is static; assigning it via 'this'
// works but is misleading — last-constructed instance wins.
550 this.releaseMode = state.DISJOINTRELEASEMODE;
552 this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL;
553 this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL;
// debug-snapshot configuration: capture graphs for a named method
// over a window of visits, optionally halting afterward
555 this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
556 this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
557 this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
558 this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
559 this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
560 this.snapVisitCounter = 1; // count visits from 1 (user will write 1, means 1st visit)
561 this.snapNodeCounter = 0; // count nodes from 0
562 this.pm=new PointerMethod();
// exactly one descriptor-visiting mode must be selected
565 state.DISJOINTDVISITSTACK ||
566 state.DISJOINTDVISITPQUE ||
567 state.DISJOINTDVISITSTACKEESONTOP;
568 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
569 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
570 assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);
572 // set some static configuration for ReachGraphs
573 ReachGraph.allocationDepth = allocationDepth;
574 ReachGraph.typeUtil = typeUtil;
576 ReachGraph.debugCallSiteVisitsUntilExit = state.DISJOINTDEBUGCALLCOUNT;
578 allocateStructures();
580 double timeStartAnalysis = (double) System.nanoTime();
582 // start interprocedural fixed-point computation
584 analysisComplete=true;
586 double timeEndAnalysis = (double) System.nanoTime();
587 double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow( 10.0, 9.0 ) );
588 String treport = String.format( "The reachability analysis took %.3f sec.", dt );
589 String justtime = String.format( "%.2f", dt );
590 System.out.println( treport );
592 if( writeFinalDOTs && !writeAllIncrementalDOTs ) {
596 if( state.DISJOINTWRITEIHMS ) {
// task programs get writeAllSharing; Java programs fall through to
// writeAllSharingJava (branch structure partly hidden by gaps)
600 if( state.DISJOINTALIASFILE != null ) {
602 writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
604 writeAllSharingJava(state.DISJOINTALIASFILE,
607 state.DISJOINTALIASTAB,
// Returns whether the interprocedural worklist still has entries,
// consulting whichever worklist the configured visiting mode uses;
// fails hard if no mode was configured.
615 protected boolean moreDescriptorsToVisit() {
616 if( state.DISJOINTDVISITSTACK ||
617 state.DISJOINTDVISITSTACKEESONTOP
619 return !descriptorsToVisitStack.isEmpty();
621 } else if( state.DISJOINTDVISITPQUE ) {
622 return !descriptorsToVisitQ.isEmpty();
625 throw new Error( "Neither descriptor visiting mode set" );
629 // fixed-point computation over the call graph--when a
630 // method's callees are updated, it must be reanalyzed
629 // fixed-point computation over the call graph--when a
630 // method's callees are updated, it must be reanalyzed
// Seeds the worklist with all reachable tasks/methods (topologically
// sorted so callees are prioritized), then repeatedly reanalyzes
// descriptors until no reachability graph changes.
631 protected void analyzeMethods() throws java.io.IOException {
634 // This analysis does not support Bamboo at the moment,
635 // but if it does in the future we would initialize the
636 // set of descriptors to analyze as the program-reachable
637 // tasks and the methods callable by them. For Java,
638 // just methods reachable from the main method.
639 System.out.println( "Bamboo..." );
640 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
642 while (taskItr.hasNext()) {
643 TaskDescriptor td = (TaskDescriptor) taskItr.next();
644 if (!descriptorsToAnalyze.contains(td)) {
645 descriptorsToAnalyze.add(td);
646 descriptorsToAnalyze.addAll(callGraph.getAllMethods(td));
651 // add all methods transitively reachable from the
652 // source's main to set for analysis
653 mdSourceEntry = typeUtil.getMain();
654 descriptorsToAnalyze.add( mdSourceEntry );
655 descriptorsToAnalyze.addAll(
656 callGraph.getAllMethods( mdSourceEntry )
659 // fabricate an empty calling context that will call
660 // the source's main, but call graph doesn't know
661 // about it, so explicitly add it
662 makeAnalysisEntryMethod( mdSourceEntry );
663 descriptorsToAnalyze.add( mdAnalysisEntry );
666 // topologically sort according to the call graph so
667 // leaf calls are ordered first, smarter analysis order
668 // CHANGED: order leaf calls last!!
669 LinkedList<Descriptor> sortedDescriptors =
670 topologicalSort( descriptorsToAnalyze );
672 // add sorted descriptors to priority queue, and duplicate
673 // the queue as a set for efficiently testing whether some
674 // method is marked for analysis
676 Iterator<Descriptor> dItr = sortedDescriptors.iterator();
677 while( dItr.hasNext() ) {
678 Descriptor d = dItr.next();
// priority p presumably derives from the topological order
// (its update falls in a gap of this view — confirm)
680 mapDescriptorToPriority.put( d, new Integer( p ) );
682 if( state.DISJOINTDVISITSTACK ||
683 state.DISJOINTDVISITSTACKEESONTOP
685 descriptorsToVisitStack.add( new DescriptorQWrapper( p, d ) );
687 } else if( state.DISJOINTDVISITPQUE ) {
688 descriptorsToVisitQ.add( new DescriptorQWrapper( p, d ) );
691 descriptorsToVisitSet.add( d );
695 // analyze methods from the priority queue until it is empty
696 while( moreDescriptorsToVisit() ) {
699 if( state.DISJOINTDVISITSTACK ||
700 state.DISJOINTDVISITSTACKEESONTOP
702 d = descriptorsToVisitStack.pop().getDescriptor();
704 } else if( state.DISJOINTDVISITPQUE ) {
705 d = descriptorsToVisitQ.poll().getDescriptor();
// keep the membership set consistent with the worklist
708 assert descriptorsToVisitSet.contains( d );
709 descriptorsToVisitSet.remove( d );
711 // because the task or method descriptor just extracted
712 // was in the "to visit" set it either hasn't been analyzed
713 // yet, or some method that it depends on has been
714 // updated. Recompute a complete reachability graph for
715 // this task/method and compare it to any previous result.
716 // If there is a change detected, add any methods/tasks
717 // that depend on this one to the "to visit" set.
719 System.out.println( "Analyzing " + d );
721 if( state.DISJOINTDVISITSTACKEESONTOP ) {
722 assert calleesToEnqueue.isEmpty();
725 ReachGraph rg = analyzeMethod( d );
726 ReachGraph rgPrev = getPartial( d );
728 if( !rg.equals( rgPrev ) ) {
731 // results for d changed, so enqueue dependents
732 // of d for further analysis
733 Iterator<Descriptor> depsItr = getDependents( d ).iterator();
734 while( depsItr.hasNext() ) {
735 Descriptor dNext = depsItr.next();
// in callees-on-top mode, callees discovered during this visit
// are pushed above the dependents so they run first
739 if( state.DISJOINTDVISITSTACKEESONTOP ) {
740 depsItr = calleesToEnqueue.iterator();
741 while( depsItr.hasNext() ) {
742 Descriptor dNext = depsItr.next();
745 calleesToEnqueue.clear();
749 // we got the same result as the last visit
750 // to this method, but we might need to clean
752 if( state.DISJOINTDVISITSTACKEESONTOP ) {
753 calleesToEnqueue.clear();
// Intraprocedural fixed point for one task/method: iterates a
// worklist of flat nodes, merging predecessor graphs, applying the
// per-node transfer function, and re-enqueueing successors on
// change. Asserts per-node monotonicity against the previous visit
// and returns the merge of all return-node graphs.
760 protected ReachGraph analyzeMethod( Descriptor d )
761 throws java.io.IOException {
763 // get the flat code for this descriptor
// the fabricated analysis entry has its own prebuilt FlatMethod
765 if( d == mdAnalysisEntry ) {
766 fm = fmAnalysisEntry;
768 fm = state.getMethodFlat( d );
770 pm.analyzeMethod( fm );
772 // intraprocedural work set
773 Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
774 flatNodesToVisit.add( fm );
// records every node processed; only used for the diagnostic dump
// when a node unexpectedly has no result
776 Set<FlatNode> debugVisited = new HashSet<FlatNode>();
778 // mapping of current partial results
779 Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph =
780 new Hashtable<FlatNode, ReachGraph>();
782 // the set of return nodes partial results that will be combined as
783 // the final, conservative approximation of the entire method
784 HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();
786 while( !flatNodesToVisit.isEmpty() ) {
787 FlatNode fn = (FlatNode) flatNodesToVisit.iterator().next();
788 flatNodesToVisit.remove( fn );
790 debugVisited.add( fn );
792 // effect transfer function defined by this node,
793 // then compare it to the old graph at this node
794 // to see if anything was updated.
796 ReachGraph rg = new ReachGraph();
797 TaskDescriptor taskDesc;
// task entry nodes start from (or create) the task's initial
// parameter heap model rather than an empty graph
798 if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null){
799 if(mapDescriptorToReachGraph.containsKey(taskDesc)){
800 // retrieve existing reach graph if it is not first time
801 rg=mapDescriptorToReachGraph.get(taskDesc);
803 // create initial reach graph for a task
804 rg=createInitialTaskReachGraph((FlatMethod)fn);
806 mapDescriptorToReachGraph.put(taskDesc, rg);
810 // start by merging all node's parents' graphs
811 for( int i = 0; i < pm.numPrev(fn); ++i ) {
812 FlatNode pn = pm.getPrev(fn,i);
813 if( mapFlatNodeToReachGraph.containsKey( pn ) ) {
814 ReachGraph rgParent = mapFlatNodeToReachGraph.get( pn );
815 rg.merge( rgParent );
// optional debug snapshot before the transfer function
820 if( takeDebugSnapshots &&
821 d.getSymbol().equals( descSymbolDebug )
823 debugSnapshot( rg, fn, true );
827 // modify rg with appropriate transfer function
828 rg = analyzeFlatNode( d, fm, fn, setReturns, rg );
// optional debug snapshot after the transfer function
831 if( takeDebugSnapshots &&
832 d.getSymbol().equals( descSymbolDebug )
834 debugSnapshot( rg, fn, false );
839 // if the results of the new graph are different from
840 // the current graph at this node, replace the graph
841 // with the update and enqueue the children
842 ReachGraph rgPrev = mapFlatNodeToReachGraph.get( fn );
843 if( !rg.equals( rgPrev ) ) {
844 mapFlatNodeToReachGraph.put( fn, rg );
846 for( int i = 0; i < pm.numNext(fn); i++ ) {
847 FlatNode nn = pm.getNext(fn, i);
848 flatNodesToVisit.add( nn );
854 // assert that the fixed-point results for each
855 // node in the method is no smaller than the last
856 // time this method was analyzed (monotonicity)
858 Iterator<FlatNode> nItr = fm.getNodeSet().iterator();
859 while( nItr.hasNext() ) {
860 FlatNode fn = nItr.next();
861 ReachGraph last = fn2rg.get( fn );
862 ReachGraph newest = mapFlatNodeToReachGraph.get( fn );
864 if( newest == null ) {
865 System.out.println( "**********\nfn null result: "+fn+
866 "\nnum visited="+debugVisited.size()+", num in set="+fm.getNodeSet().size()+
867 "\nvisited:"+debugVisited );
870 assert newest != null;
// dump both graphs before failing so the regression is inspectable
872 if( !ReachGraph.isNoSmallerThan( last, newest ) ) {
873 last.writeGraph( "last", true, false, false, true, true );
874 newest.writeGraph( "newest", true, false, false, true, true );
875 throw new Error( "transfer func for "+fn+" was not monotic" );
878 fn2rg.put( fn, newest );
882 // end by merging all return nodes into a complete
883 // reach graph that represents all possible heap
884 // states after the flat method returns
885 ReachGraph completeGraph = new ReachGraph();
887 assert !setReturns.isEmpty();
888 Iterator retItr = setReturns.iterator();
889 while( retItr.hasNext() ) {
890 FlatReturnNode frn = (FlatReturnNode) retItr.next();
892 assert mapFlatNodeToReachGraph.containsKey( frn );
893 ReachGraph rgRet = mapFlatNodeToReachGraph.get( frn );
895 completeGraph.merge( rgRet );
899 if( takeDebugSnapshots &&
900 d.getSymbol().equals( descSymbolDebug )
902 // increment that we've visited the debug snap
903 // method, and reset the node counter
904 System.out.println( " @@@ debug snap at visit "+snapVisitCounter );
// optionally halt the whole analysis once the capture window ends
908 if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&
911 System.out.println( "!!! Stopping analysis after debug snap captures. !!!" );
917 return completeGraph;
// Per-node transfer function: dispatches on the flat IR node kind
// and applies the matching heap-abstraction update to rg.
// NOTE(review): the return type line, some case labels/breaks, and
// lhs/rhs extraction statements fall in gaps of this view; the
// surrounding calls suggest it returns a ReachGraph — confirm.
922 analyzeFlatNode( Descriptor d,
923 FlatMethod fmContaining,
925 HashSet<FlatReturnNode> setRetNodes,
927 ) throws java.io.IOException {
930 // any variables that are no longer live should be
931 // nullified in the graph to reduce edges
932 //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );
939 // use node type to decide what transfer function
940 // to apply to the reachability graph
941 switch( fn.kind() ) {
943 case FKind.FlatMethod: {
944 // construct this method's initial heap model (IHM)
945 // since we're working on the FlatMethod, we know
946 // the incoming ReachGraph 'rg' is empty
948 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
949 getIHMcontributions( d );
951 Set entrySet = heapsFromCallers.entrySet();
952 Iterator itr = entrySet.iterator();
953 while( itr.hasNext() ) {
954 Map.Entry me = (Map.Entry) itr.next();
955 FlatCall fc = (FlatCall) me.getKey();
956 ReachGraph rgContrib = (ReachGraph) me.getValue();
958 assert fc.getMethod().equals( d );
960 rg.merge( rgContrib );
963 // additionally, we are enforcing STRICT MONOTONICITY for the
964 // method's initial context, so grow the context by whatever
965 // the previously computed context was, and put the most
966 // up-to-date context back in the map
967 ReachGraph rgPrevContext = mapDescriptorToInitialContext.get( d );
968 rg.merge( rgPrevContext );
969 mapDescriptorToInitialContext.put( d, rg );
// plain assignment x = y: copy y's reachability onto x
973 case FKind.FlatOpNode:
974 FlatOpNode fon = (FlatOpNode) fn;
975 if( fon.getOp().getOp() == Operation.ASSIGN ) {
978 rg.assignTempXEqualToTempY( lhs, rhs );
// cast x = (T) y
982 case FKind.FlatCastNode:
983 FlatCastNode fcn = (FlatCastNode) fn;
987 TypeDescriptor td = fcn.getType();
990 rg.assignTempXEqualToCastedTempY( lhs, rhs, td );
// field read x = y.f (only for reference-bearing field types)
993 case FKind.FlatFieldNode:
994 FlatFieldNode ffn = (FlatFieldNode) fn;
997 fld = ffn.getField();
998 if( shouldAnalysisTrack( fld.getType() ) ) {
999 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fld );
// field write x.f = y
1003 case FKind.FlatSetFieldNode:
1004 FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;
1005 lhs = fsfn.getDst();
1006 fld = fsfn.getField();
1007 rhs = fsfn.getSrc();
1008 if( shouldAnalysisTrack( fld.getType() ) ) {
1009 rg.assignTempXFieldFEqualToTempY( lhs, fld, rhs );
// array read x = y[i]: arrays are modeled with a synthetic
// element field (see getArrayField)
1013 case FKind.FlatElementNode:
1014 FlatElementNode fen = (FlatElementNode) fn;
1017 if( shouldAnalysisTrack( lhs.getType() ) ) {
1019 assert rhs.getType() != null;
1020 assert rhs.getType().isArray();
1022 TypeDescriptor tdElement = rhs.getType().dereference();
1023 FieldDescriptor fdElement = getArrayField( tdElement );
1025 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fdElement );
// array write x[i] = y
1029 case FKind.FlatSetElementNode:
1030 FlatSetElementNode fsen = (FlatSetElementNode) fn;
1032 if( arrayReferencees.doesNotCreateNewReaching( fsen ) ) {
1033 // skip this node if it cannot create new reachability paths
1037 lhs = fsen.getDst();
1038 rhs = fsen.getSrc();
1039 if( shouldAnalysisTrack( rhs.getType() ) ) {
1041 assert lhs.getType() != null;
1042 assert lhs.getType().isArray();
1044 TypeDescriptor tdElement = lhs.getType().dereference();
1045 FieldDescriptor fdElement = getArrayField( tdElement );
1047 rg.assignTempXFieldFEqualToTempY( lhs, fdElement, rhs );
// allocation x = new T(): bind x to this site's abstraction
1052 FlatNew fnn = (FlatNew) fn;
1054 if( shouldAnalysisTrack( lhs.getType() ) ) {
1055 AllocSite as = getAllocSiteFromFlatNewPRIVATE( fnn );
1056 rg.assignTempEqualToNewAlloc( lhs, as );
1060 case FKind.FlatCall: {
1061 //TODO: temporal fix for task descriptor case
1062 //MethodDescriptor mdCaller = fmContaining.getMethod();
1063 Descriptor mdCaller;
1064 if(fmContaining.getMethod()!=null){
1065 mdCaller = fmContaining.getMethod();
1067 mdCaller = fmContaining.getTask();
1069 FlatCall fc = (FlatCall) fn;
1070 MethodDescriptor mdCallee = fc.getMethod();
1071 FlatMethod fmCallee = state.getMethodFlat( mdCallee );
// extra DOT output only for the caller/callee pair named in the
// debug options
1073 boolean writeDebugDOTs =
1074 mdCaller.getSymbol().equals( state.DISJOINTDEBUGCALLER ) &&
1075 mdCallee.getSymbol().equals( state.DISJOINTDEBUGCALLEE );
1078 // calculate the heap this call site can reach--note this is
1079 // not used for the current call site transform, we are
1080 // grabbing this heap model for future analysis of the callees,
1081 // so if different results emerge we will return to this site
1082 ReachGraph heapForThisCall_old =
1083 getIHMcontribution( mdCallee, fc );
1085 // the computation of the callee-reachable heap
1086 // is useful for making the callee starting point
1087 // and for applying the call site transfer function
1088 Set<Integer> callerNodeIDsCopiedToCallee =
1089 new HashSet<Integer>();
1091 ReachGraph heapForThisCall_cur =
1092 rg.makeCalleeView( fc,
1094 callerNodeIDsCopiedToCallee,
1098 if( !heapForThisCall_cur.equals( heapForThisCall_old ) ) {
1099 // if heap at call site changed, update the contribution,
1100 // and reschedule the callee for analysis
1101 addIHMcontribution( mdCallee, fc, heapForThisCall_cur );
1103 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1104 calleesToEnqueue.add( mdCallee );
1106 enqueue( mdCallee );
1112 // the transformation for a call site should update the
1113 // current heap abstraction with any effects from the callee,
1114 // or if the method is virtual, the effects from any possible
1115 // callees, so find the set of callees...
1116 Set<MethodDescriptor> setPossibleCallees =
1117 new HashSet<MethodDescriptor>();
1119 if( mdCallee.isStatic() ) {
1120 setPossibleCallees.add( mdCallee );
// virtual dispatch: ask the call graph for every override
// reachable through the receiver's static type
1122 TypeDescriptor typeDesc = fc.getThis().getType();
1123 setPossibleCallees.addAll( callGraph.getMethods( mdCallee,
1128 ReachGraph rgMergeOfEffects = new ReachGraph();
1130 Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
1131 while( mdItr.hasNext() ) {
1132 MethodDescriptor mdPossible = mdItr.next();
1133 FlatMethod fmPossible = state.getMethodFlat( mdPossible );
1135 addDependent( mdPossible, // callee
1138 // don't alter the working graph (rg) until we compute a
1139 // result for every possible callee, merge them all together,
1140 // then set rg to that
1141 ReachGraph rgCopy = new ReachGraph();
1144 ReachGraph rgEffect = getPartial( mdPossible );
1146 if( rgEffect == null ) {
1147 // if this method has never been analyzed just schedule it
1148 // for analysis and skip over this call site for now
1149 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1150 calleesToEnqueue.add( mdPossible );
1152 enqueue( mdPossible );
1156 rgCopy.resolveMethodCall( fc,
1159 callerNodeIDsCopiedToCallee,
1164 rgMergeOfEffects.merge( rgCopy );
1168 // now that we've taken care of building heap models for
1169 // callee analysis, finish this transformation
1170 rg = rgMergeOfEffects;
// return statement: record the returned temp's reachability and
// remember this node for the method-exit merge
1174 case FKind.FlatReturnNode:
1175 FlatReturnNode frn = (FlatReturnNode) fn;
1176 rhs = frn.getReturnTemp();
1177 if( rhs != null && shouldAnalysisTrack( rhs.getType() ) ) {
1178 rg.assignReturnEqualToTemp( rhs );
1180 setRetNodes.add( frn );
1186 // dead variables were removed before the above transfer function
1187 // was applied, so eliminate heap regions and edges that are no
1188 // longer part of the abstractly-live heap graph, and sweep up
1189 // and reachability effects that are altered by the reduction
1190 //rg.abstractGarbageCollect();
1194 // back edges are strictly monotonic
1195 if( pm.isBackEdge( fn ) ) {
1196 ReachGraph rgPrevResult = mapBackEdgeToMonotone.get( fn );
1197 rg.merge( rgPrevResult );
1198 mapBackEdgeToMonotone.put( fn, rg );
1201 // at this point rg should be the correct update
1202 // by an above transfer function, or untouched if
1203 // the flat node type doesn't affect the heap
1209 // this method should generate integers strictly greater than zero!
1210 // special "shadow" regions are made from a heap region by negating
// NOTE(review): the update of uniqueIDcount is not visible in this
// fragment; presumably the counter is incremented before boxing -- confirm.
1212 static public Integer generateUniqueHeapRegionNodeID() {
1214 return new Integer( uniqueIDcount );
// Lazily create and cache the synthetic public field descriptor used
// to model the contents of an array whose element type is tdElement.
1219 static public FieldDescriptor getArrayField( TypeDescriptor tdElement ) {
1220 FieldDescriptor fdElement = mapTypeToArrayField.get( tdElement );
1221 if( fdElement == null ) {
// first request for this element type: fabricate the stand-in field
1222 fdElement = new FieldDescriptor( new Modifiers( Modifiers.PUBLIC ),
1224 arrayElementFieldName,
1227 mapTypeToArrayField.put( tdElement, fdElement );
// Dump the final (complete) reach graph of every analyzed descriptor
// to a DOT graph named "COMPLETE<descriptor>".
1234 private void writeFinalGraphs() {
1235 Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
1236 Iterator itr = entrySet.iterator();
1237 while( itr.hasNext() ) {
1238 Map.Entry me = (Map.Entry) itr.next();
1239 Descriptor d = (Descriptor) me.getKey();
1240 ReachGraph rg = (ReachGraph) me.getValue();
1242 rg.writeGraph( "COMPLETE"+d,
1243 true, // write labels (variables)
1244 true, // selectively hide intermediate temp vars
1245 true, // prune unreachable heap regions
1246 false, // hide subset reachability states
1247 true ); // hide edge taints
// Dump every initial heap model (IHM) contribution to a DOT graph,
// one graph per (callee descriptor, caller call site) pair.
1251 private void writeFinalIHMs() {
1252 Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
1253 while( d2IHMsItr.hasNext() ) {
1254 Map.Entry me1 = (Map.Entry) d2IHMsItr.next();
1255 Descriptor d = (Descriptor) me1.getKey();
1256 Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>) me1.getValue();
// inner loop: each call site's contributed heap for this callee
1258 Iterator fc2rgItr = IHMs.entrySet().iterator();
1259 while( fc2rgItr.hasNext() ) {
1260 Map.Entry me2 = (Map.Entry) fc2rgItr.next();
1261 FlatCall fc = (FlatCall) me2.getKey();
1262 ReachGraph rg = (ReachGraph) me2.getValue();
1264 rg.writeGraph( "IHMPARTFOR"+d+"FROM"+fc,
1265 true, // write labels (variables)
1266 true, // selectively hide intermediate temp vars
1267 true, // prune unreachable heap regions
1268 false, // hide subset reachability states
1269 true ); // hide edge taints
// Fetch the most recent analysis result recorded for descriptor d,
// or null if d has not produced a result yet.
1275 protected ReachGraph getPartial( Descriptor d ) {
1276 return mapDescriptorToCompleteReachGraph.get( d );
// Record rg as the latest analysis result for descriptor d; when the
// writeAllIncrementalDOTs flag is set, also emit a numbered DOT
// snapshot so successive partial results get unique file names.
1279 protected void setPartial( Descriptor d, ReachGraph rg ) {
1280 mapDescriptorToCompleteReachGraph.put( d, rg );
1282 // when the flag for writing out every partial
1283 // result is set, we should spit out the graph,
1284 // but in order to give it a unique name we need
1285 // to track how many partial results for this
1286 // descriptor we've already written out
1287 if( writeAllIncrementalDOTs ) {
1288 if( !mapDescriptorToNumUpdates.containsKey( d ) ) {
1289 mapDescriptorToNumUpdates.put( d, new Integer( 0 ) );
1291 Integer n = mapDescriptorToNumUpdates.get( d );
1293 rg.writeGraph( d+"COMPLETE"+String.format( "%05d", n ),
1294 true, // write labels (variables)
1295 true, // selectively hide intermediate temp vars
1296 true, // prune unreachable heap regions
1297 false, // hide subset reachability states
1298 true ); // hide edge taints
1300 mapDescriptorToNumUpdates.put( d, n + 1 );
1306 // return just the allocation site associated with one FlatNew node
// Lazily built on first request: one unique heap region ID per age
// slot (0 = newest) plus a summary ID; every generated ID is also
// registered in mapHrnIdToAllocSite for reverse lookup.
1307 protected AllocSite getAllocSiteFromFlatNewPRIVATE( FlatNew fnew ) {
1309 if( !mapFlatNewToAllocSite.containsKey( fnew ) ) {
1310 AllocSite as = AllocSite.factory( allocationDepth,
1312 fnew.getDisjointId()
1315 // the newest nodes are single objects
1316 for( int i = 0; i < allocationDepth; ++i ) {
1317 Integer id = generateUniqueHeapRegionNodeID();
1318 as.setIthOldest( i, id );
1319 mapHrnIdToAllocSite.put( id, as );
1322 // the oldest node is a summary node
1323 as.setSummary( generateUniqueHeapRegionNodeID() );
1325 mapFlatNewToAllocSite.put( fnew, as );
1328 return mapFlatNewToAllocSite.get( fnew );
// Decide whether this analysis models values of the given type:
// heap objects and arrays are tracked, scalar primitives are not.
1332 public static boolean shouldAnalysisTrack( TypeDescriptor type ) {
1333 // don't track primitive types, but an array
1334 // of primitives is heap memory
1335 if( type.isImmutable() ) {
1336 return type.isArray();
1339 // everything else is an object
// Number of methods/tasks scheduled for analysis.
1343 protected int numMethodsAnalyzed() {
1344 return descriptorsToAnalyze.size();
1351 // Take in source entry which is the program's compiled entry and
1352 // create a new analysis entry, a method that takes no parameters
1353 // and appears to allocate the command line arguments and call the
1354 // source entry with them. The purpose of this analysis entry is
1355 // to provide a top-level method context with no parameters left.
1356 protected void makeAnalysisEntryMethod( MethodDescriptor mdSourceEntry ) {
1358 Modifiers mods = new Modifiers();
1359 mods.addModifier( Modifiers.PUBLIC );
1360 mods.addModifier( Modifiers.STATIC );
// the fabricated entry returns void and takes no parameters
1362 TypeDescriptor returnType =
1363 new TypeDescriptor( TypeDescriptor.VOID );
1365 this.mdAnalysisEntry =
1366 new MethodDescriptor( mods,
1368 "analysisEntryMethod"
// model the command-line args as a fresh allocation so the source
// entry's parameter is bound to a concrete allocation site
1371 TempDescriptor cmdLineArgs =
1372 new TempDescriptor( "args",
1373 mdSourceEntry.getParamType( 0 )
1377 new FlatNew( mdSourceEntry.getParamType( 0 ),
1382 TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
1383 sourceEntryArgs[0] = cmdLineArgs;
// invoke the real program entry with the fabricated args array
1386 new FlatCall( mdSourceEntry,
1392 FlatReturnNode frn = new FlatReturnNode( null );
1394 FlatExit fe = new FlatExit();
1396 this.fmAnalysisEntry =
1397 new FlatMethod( mdAnalysisEntry,
1401 this.fmAnalysisEntry.addNext( fn );
// Produce a topological ordering of the descriptors in toSort via
// DFS over the caller relation (see dfsVisit); callees end up before
// their callers in the returned list.
1408 protected LinkedList<Descriptor> topologicalSort( Set<Descriptor> toSort ) {
1410 Set <Descriptor> discovered = new HashSet <Descriptor>();
1411 LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();
1413 Iterator<Descriptor> itr = toSort.iterator();
1414 while( itr.hasNext() ) {
1415 Descriptor d = itr.next();
1417 if( !discovered.contains( d ) ) {
1418 dfsVisit( d, toSort, sorted, discovered );
1425 // While we're doing DFS on call graph, remember
1426 // dependencies for efficient queuing of methods
1427 // during interprocedural analysis:
1429 // a dependent of a method descriptor d for this analysis is:
1430 // 1) a method or task that invokes d
1431 // 2) in the descriptorsToAnalyze set
1432 protected void dfsVisit( Descriptor d,
1433 Set <Descriptor> toSort,
1434 LinkedList<Descriptor> sorted,
1435 Set <Descriptor> discovered ) {
1436 discovered.add( d );
1438 // only methods have callers, tasks never do
1439 if( d instanceof MethodDescriptor ) {
1441 MethodDescriptor md = (MethodDescriptor) d;
1443 // the call graph is not aware that we have a fabricated
1444 // analysis entry that calls the program source's entry
1445 if( md == mdSourceEntry ) {
1446 if( !discovered.contains( mdAnalysisEntry ) ) {
1447 addDependent( mdSourceEntry, // callee
1448 mdAnalysisEntry // caller
1450 dfsVisit( mdAnalysisEntry, toSort, sorted, discovered );
1454 // otherwise call graph guides DFS
1455 Iterator itr = callGraph.getCallerSet( md ).iterator();
1456 while( itr.hasNext() ) {
1457 Descriptor dCaller = (Descriptor) itr.next();
1459 // only consider callers in the original set to analyze
1460 if( !toSort.contains( dCaller ) ) {
1464 if( !discovered.contains( dCaller ) ) {
1465 addDependent( md, // callee
1469 dfsVisit( dCaller, toSort, sorted, discovered );
1474 // for leaf-nodes last now!
// post-order append: d is added only after all of its callers, so
// callees come out earliest in the final ordering
1475 sorted.addLast( d );
// Schedule descriptor d for (re)analysis unless already queued; the
// backing structure (stack vs. priority queue) is chosen by the
// state's DISJOINTDVISIT* flags.
1479 protected void enqueue( Descriptor d ) {
1480 if( !descriptorsToVisitSet.contains( d ) ) {
1481 Integer priority = mapDescriptorToPriority.get( d );
1483 if( state.DISJOINTDVISITSTACK ||
1484 state.DISJOINTDVISITSTACKEESONTOP
1486 descriptorsToVisitStack.add( new DescriptorQWrapper( priority,
1490 } else if( state.DISJOINTDVISITPQUE ) {
1491 descriptorsToVisitQ.add( new DescriptorQWrapper( priority,
1496 descriptorsToVisitSet.add( d );
1501 // a dependent of a method descriptor d for this analysis is:
1502 // 1) a method or task that invokes d
1503 // 2) in the descriptorsToAnalyze set
// Register caller as a dependent of callee so that a change in the
// callee's result can reschedule the caller.
1504 protected void addDependent( Descriptor callee, Descriptor caller ) {
1505 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
1506 if( deps == null ) {
1507 deps = new HashSet<Descriptor>();
1510 mapDescriptorToSetDependents.put( callee, deps );
// Return (never null) the set of dependents of callee, creating and
// caching an empty set on first request.
1513 protected Set<Descriptor> getDependents( Descriptor callee ) {
1514 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
1515 if( deps == null ) {
1516 deps = new HashSet<Descriptor>();
1517 mapDescriptorToSetDependents.put( callee, deps );
// Return (never null) the per-call-site initial heap model table for
// descriptor d, creating and caching an empty table on first request.
1523 public Hashtable<FlatCall, ReachGraph> getIHMcontributions( Descriptor d ) {
1525 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1526 mapDescriptorToIHMcontributions.get( d );
1528 if( heapsFromCallers == null ) {
1529 heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
1530 mapDescriptorToIHMcontributions.put( d, heapsFromCallers );
1533 return heapsFromCallers;
// Return d's initial heap model contributed at call site fc,
// installing an empty ReachGraph if fc has no entry yet.
1536 public ReachGraph getIHMcontribution( Descriptor d,
1539 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1540 getIHMcontributions( d );
1542 if( !heapsFromCallers.containsKey( fc ) ) {
1543 heapsFromCallers.put( fc, new ReachGraph() );
1546 return heapsFromCallers.get( fc );
// Overwrite descriptor d's initial heap model contribution for call
// site fc with rg.
1549 public void addIHMcontribution( Descriptor d,
1553 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1554 getIHMcontributions( d );
1556 heapsFromCallers.put( fc, rg );
// Fabricate an allocation site modeling the heap object a method
// parameter (tempDesc) refers to on entry; every generated heap
// region ID is registered in mapHrnIdToAllocSite.
1559 private AllocSite createParameterAllocSite( ReachGraph rg,
1560 TempDescriptor tempDesc
1563 FlatNew flatNew = new FlatNew( tempDesc.getType(), // type
1564 tempDesc, // param temp
1565 false, // global alloc?
1566 "param"+tempDesc // disjoint site ID string
1568 // create allocation site
1569 AllocSite as = AllocSite.factory( allocationDepth,
1571 flatNew.getDisjointId()
1573 for (int i = 0; i < allocationDepth; ++i) {
1574 Integer id = generateUniqueHeapRegionNodeID();
1575 as.setIthOldest(i, id);
1576 mapHrnIdToAllocSite.put(id, as);
1578 // the oldest node is a summary node
1579 as.setSummary( generateUniqueHeapRegionNodeID() );
// Collect the fields of typeDesc's class that this analysis tracks
// (see shouldAnalysisTrack); immutable types contribute no fields.
1587 private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc){
1589 Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
1590 if(!typeDesc.isImmutable()){
1591 ClassDescriptor classDesc = typeDesc.getClassDesc();
1592 for (Iterator it = classDesc.getFields(); it.hasNext();) {
1593 FieldDescriptor field = (FieldDescriptor) it.next();
1594 TypeDescriptor fieldType = field.getType();
1595 if (shouldAnalysisTrack( fieldType )) {
1596 fieldSet.add(field);
// Model a multi-dimensional array reachable through field fd of
// srcHRN: build one summary heap region node per dimension, chained
// by the synthetic array-element field, reusing per-type nodes cached
// in mapToExistingNode. The mapping from the first-dimension node to
// the deepest node is recorded in map; returns the first-dimension
// (entry) node.
1604 private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha ){
1606 int dimCount=fd.getType().getArrayCount();
1607 HeapRegionNode prevNode=null;
1608 HeapRegionNode arrayEntryNode=null;
// walk from the outermost dimension (i == dimCount) inward
1609 for(int i=dimCount;i>0;i--){
1610 TypeDescriptor typeDesc=fd.getType().dereference();//hack to get instance of type desc
1611 typeDesc.setArrayCount(i);
1612 TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
1613 HeapRegionNode hrnSummary ;
1614 if(!mapToExistingNode.containsKey(typeDesc)){
// no cached node for this dimension's type: make a fresh summary
1619 as = createParameterAllocSite(rg, tempDesc);
1621 // make a new reference to allocated node
1623 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
1624 false, // single object?
1626 false, // out-of-context?
1627 as.getType(), // type
1628 as, // allocation site
1629 alpha, // inherent reach
1630 alpha, // current reach
1631 ExistPredSet.factory(rg.predTrue), // predicates
1632 tempDesc.toString() // description
1634 rg.id2hrn.put(as.getSummary(),hrnSummary);
1636 mapToExistingNode.put(typeDesc, hrnSummary);
1638 hrnSummary=mapToExistingNode.get(typeDesc);
// the first dimension hangs off the source node via fd; deeper
// dimensions hang off the previous dimension's summary node
1642 // make a new reference between new summary node and source
1643 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
1646 fd.getSymbol(), // field name
1648 ExistPredSet.factory(rg.predTrue) // predicates
1651 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
1652 prevNode=hrnSummary;
1653 arrayEntryNode=hrnSummary;
1655 // make a new reference between summary nodes of array
1656 RefEdge edgeToSummary = new RefEdge(prevNode, // source
1659 arrayElementFieldName, // field name
1661 ExistPredSet.factory(rg.predTrue) // predicates
1664 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
1665 prevNode=hrnSummary;
1670 // create a new obj node if obj has at least one non-primitive field
1671 TypeDescriptor type=fd.getType();
1672 if(getFieldSetTobeAnalyzed(type).size()>0){
1673 TypeDescriptor typeDesc=type.dereference();
1674 typeDesc.setArrayCount(0);
1675 if(!mapToExistingNode.containsKey(typeDesc)){
1676 TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
1677 AllocSite as = createParameterAllocSite(rg, tempDesc);
1678 // make a new reference to allocated node
1679 HeapRegionNode hrnSummary =
1680 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
1681 false, // single object?
1683 false, // out-of-context?
1685 as, // allocation site
1686 alpha, // inherent reach
1687 alpha, // current reach
1688 ExistPredSet.factory(rg.predTrue), // predicates
1689 tempDesc.toString() // description
1691 rg.id2hrn.put(as.getSummary(),hrnSummary);
1692 mapToExistingNode.put(typeDesc, hrnSummary);
1693 RefEdge edgeToSummary = new RefEdge(prevNode, // source
1696 arrayElementFieldName, // field name
1698 ExistPredSet.factory(rg.predTrue) // predicates
1700 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
1701 prevNode=hrnSummary;
1703 HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
// avoid duplicating an element edge that is already present
1704 if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null){
1705 RefEdge edgeToSummary = new RefEdge(prevNode, // source
1708 arrayElementFieldName, // field name
1710 ExistPredSet.factory(rg.predTrue) // predicates
1712 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
1714 prevNode=hrnSummary;
// remember which deepest node the entry node ultimately leads to
1718 map.put(arrayEntryNode, prevNode);
1719 return arrayEntryNode;
// Build the initial reach graph for a task: give each task parameter
// a fabricated parameter allocation site, then transitively
// materialize summary nodes for every analysis-tracked field
// reachable from the parameters (a worklist over (node, field) pairs).
1722 private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
1723 ReachGraph rg = new ReachGraph();
1724 TaskDescriptor taskDesc = fm.getTask();
1726 for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
1727 Descriptor paramDesc = taskDesc.getParameter(idx);
1728 TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);
1730 // setup data structure
1731 Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
1732 new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
1733 Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
1734 new Hashtable<TypeDescriptor, HeapRegionNode>();
1735 Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
1736 new Hashtable<HeapRegionNode, HeapRegionNode>();
1737 Set<String> doneSet = new HashSet<String>();
1739 TempDescriptor tempDesc = fm.getParameter(idx);
// bind the parameter variable to its fabricated allocation site
1741 AllocSite as = createParameterAllocSite(rg, tempDesc);
1742 VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
1743 Integer idNewest = as.getIthOldest(0);
1744 HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);
1746 // make a new reference to allocated node
1747 RefEdge edgeNew = new RefEdge(lnX, // source
1749 taskDesc.getParamType(idx), // type
1751 hrnNewest.getAlpha(), // beta
1752 ExistPredSet.factory(rg.predTrue) // predicates
1754 rg.addRefEdge(lnX, hrnNewest, edgeNew);
1756 // set-up a work set for class field
1757 ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
1758 for (Iterator it = classDesc.getFields(); it.hasNext();) {
1759 FieldDescriptor fd = (FieldDescriptor) it.next();
1760 TypeDescriptor fieldType = fd.getType();
1761 if (shouldAnalysisTrack( fieldType )) {
1762 HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
1763 newMap.put(hrnNewest, fd);
1764 workSet.add(newMap);
1768 int uniqueIdentifier = 0;
// worklist: each item is a single-entry map (source node -> field)
1769 while (!workSet.isEmpty()) {
1770 HashMap<HeapRegionNode, FieldDescriptor> map = workSet
1772 workSet.remove(map);
1774 Set<HeapRegionNode> key = map.keySet();
1775 HeapRegionNode srcHRN = key.iterator().next();
1776 FieldDescriptor fd = map.get(srcHRN);
1777 TypeDescriptor type = fd.getType();
// doneSet keys on (source node, field) to avoid re-expansion
1778 String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;
1780 if (!doneSet.contains(doneSetIdentifier)) {
1781 doneSet.add(doneSetIdentifier);
1782 if (!mapTypeToExistingSummaryNode.containsKey(type)) {
1783 // create new summary Node
1784 TempDescriptor td = new TempDescriptor("temp"
1785 + uniqueIdentifier, type);
1787 AllocSite allocSite;
1788 if(type.equals(paramTypeDesc)){
1789 //corresponding allocsite has already been created for a parameter variable.
1792 allocSite = createParameterAllocSite(rg, td);
1794 String strDesc = allocSite.toStringForDOT()
1796 TypeDescriptor allocType=allocSite.getType();
1798 HeapRegionNode hrnSummary;
// multi-dimensional arrays get a chained per-dimension model
1799 if(allocType.isArray() && allocType.getArrayCount()>0){
1800 hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
1803 rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
1804 false, // single object?
1806 false, // out-of-context?
1807 allocSite.getType(), // type
1808 allocSite, // allocation site
1809 hrnNewest.getAlpha(), // inherent reach
1810 hrnNewest.getAlpha(), // current reach
1811 ExistPredSet.factory(rg.predTrue), // predicates
1812 strDesc // description
1814 rg.id2hrn.put(allocSite.getSummary(),hrnSummary);
1816 // make a new reference to summary node
1817 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
1820 fd.getSymbol(), // field name
1821 hrnNewest.getAlpha(), // beta
1822 ExistPredSet.factory(rg.predTrue) // predicates
1825 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
// cache so later fields of the same type share this summary node
1829 mapTypeToExistingSummaryNode.put(type, hrnSummary);
1831 // set-up a work set for fields of the class
1832 Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
1833 for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
1835 FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator
1837 HeapRegionNode newDstHRN;
1838 if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)){
// the related heap region node already exists (array case):
// expand fields from the deepest-dimension node instead
1840 newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
1842 newDstHRN=hrnSummary;
1844 doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
1845 if(!doneSet.contains(doneSetIdentifier)){
1846 // add new work item
1847 HashMap<HeapRegionNode, FieldDescriptor> newMap =
1848 new HashMap<HeapRegionNode, FieldDescriptor>();
1849 newMap.put(newDstHRN, fieldDescriptor);
1850 workSet.add(newMap);
1855 // if there exists corresponding summary node
1856 HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);
1858 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
1860 fd.getType(), // type
1861 fd.getSymbol(), // field name
1862 srcHRN.getAlpha(), // beta
1863 ExistPredSet.factory(rg.predTrue) // predicates
1865 rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
1871 // debugSnapshot(rg, fm, true);
1875 // return all allocation sites in the method (there is one allocation
1876 // site per FlatNew node in a method)
// Computed once per descriptor and cached in mapDescriptorToAllocSiteSet.
1877 private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
1878 if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
1879 buildAllocationSiteSet(d);
1882 return mapDescriptorToAllocSiteSet.get(d);
// Traverse descriptor d's flat IR (method or task) and cache the set
// of allocation sites -- one per FlatNew node reached -- in
// mapDescriptorToAllocSiteSet.
1886 private void buildAllocationSiteSet(Descriptor d) {
1887 HashSet<AllocSite> s = new HashSet<AllocSite>();
1890 if( d instanceof MethodDescriptor ) {
1891 fm = state.getMethodFlat( (MethodDescriptor) d);
1893 assert d instanceof TaskDescriptor;
1894 fm = state.getMethodFlat( (TaskDescriptor) d);
1896 pm.analyzeMethod(fm);
1898 // visit every node in this FlatMethod's IR graph
1899 // and make a set of the allocation sites from the
1900 // FlatNew node's visited
1901 HashSet<FlatNode> visited = new HashSet<FlatNode>();
1902 HashSet<FlatNode> toVisit = new HashSet<FlatNode>();
1905 while( !toVisit.isEmpty() ) {
1906 FlatNode n = toVisit.iterator().next();
1908 if( n instanceof FlatNew ) {
1909 s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );
// enqueue unvisited successors of n
1915 for( int i = 0; i < pm.numNext(n); ++i ) {
1916 FlatNode child = pm.getNext(n, i);
1917 if( !visited.contains(child) ) {
1923 mapDescriptorToAllocSiteSet.put(d, s);
// Collect allocation sites carrying an explicit disjointness ID,
// searching dIn and every method transitively callable from it.
1926 private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {
1928 HashSet<AllocSite> out = new HashSet<AllocSite>();
1929 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
1930 HashSet<Descriptor> visited = new HashSet<Descriptor>();
1934 while (!toVisit.isEmpty()) {
1935 Descriptor d = toVisit.iterator().next();
// keep the sites of this descriptor that have a disjoint ID
1939 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
1940 Iterator asItr = asSet.iterator();
1941 while (asItr.hasNext()) {
1942 AllocSite as = (AllocSite) asItr.next();
1943 if (as.getDisjointAnalysisId() != null) {
1948 // enqueue callees of this method to be searched for
1949 // allocation sites also
1950 Set callees = callGraph.getCalleeSet(d);
1951 if (callees != null) {
1952 Iterator methItr = callees.iterator();
1953 while (methItr.hasNext()) {
1954 MethodDescriptor md = (MethodDescriptor) methItr.next();
1956 if (!visited.contains(md)) {
// Collect allocation sites whose class carries flags, reachable from
// task td: traverse td and every method it can transitively call.
1967 private HashSet<AllocSite>
1968 getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {
1970 HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
1971 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
1972 HashSet<Descriptor> visited = new HashSet<Descriptor>();
1976 // traverse this task and all methods reachable from this task
1977 while( !toVisit.isEmpty() ) {
1978 Descriptor d = toVisit.iterator().next();
// keep sites whose allocated class has task flags
1982 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
1983 Iterator asItr = asSet.iterator();
1984 while( asItr.hasNext() ) {
1985 AllocSite as = (AllocSite) asItr.next();
1986 TypeDescriptor typed = as.getType();
1987 if( typed != null ) {
1988 ClassDescriptor cd = typed.getClassDesc();
1989 if( cd != null && cd.hasFlags() ) {
1995 // enqueue callees of this method to be searched for
1996 // allocation sites also
1997 Set callees = callGraph.getCalleeSet(d);
1998 if( callees != null ) {
1999 Iterator methItr = callees.iterator();
2000 while( methItr.hasNext() ) {
2001 MethodDescriptor md = (MethodDescriptor) methItr.next();
2003 if( !visited.contains(md) ) {
2016 // get successive captures of the analysis state, use compiler
// Debug-snapshot controls: when takeDebugSnapshots is enabled,
// visits numbered visitStartCapture..visitStartCapture+numVisitsToCapture
// are dumped as DOT graphs (see debugSnapshot below).
// NOTE(review): descSymbolDebug presumably names the method symbol to
// capture, and stopAfterCapture presumably halts the analysis after
// capturing -- neither use is visible in this fragment; confirm.
2018 boolean takeDebugSnapshots = false;
2019 String descSymbolDebug = null;
2020 boolean stopAfterCapture = false;
2021 int snapVisitCounter = 0;
2022 int snapNodeCounter = 0;
2023 int visitStartCapture = 0;
2024 int numVisitsToCapture = 0;
2027 void debugSnapshot( ReachGraph rg, FlatNode fn, boolean in ) {
2028 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
2036 if( snapVisitCounter >= visitStartCapture ) {
2037 System.out.println( " @@@ snapping visit="+snapVisitCounter+
2038 ", node="+snapNodeCounter+
2042 graphName = String.format( "snap%02d_%04din",
2046 graphName = String.format( "snap%02d_%04dout",
2051 graphName = graphName + fn;
2053 rg.writeGraph( graphName,
2054 true, // write labels (variables)
2055 true, // selectively hide intermediate temp vars
2056 true, // prune unreachable heap regions
2057 false, // hide subset reachability states
2058 true );// hide edge taints