package Analysis.Disjoint;

import Analysis.CallGraph.*;
import Analysis.Liveness;
import Analysis.ArrayReferencees;
import Analysis.RBlockRelationAnalysis;
import IR.Tree.Modifiers;
import java.util.*;
import java.io.*;
14 public class DisjointAnalysis {
16 ///////////////////////////////////////////
18 // Public interface to discover possible
19 // aliases in the program under analysis
21 ///////////////////////////////////////////
23 public HashSet<AllocSite>
24 getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
25 checkAnalysisComplete();
26 return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
29 public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
30 checkAnalysisComplete();
31 return getAllocSiteFromFlatNewPRIVATE(fn);
34 public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
35 checkAnalysisComplete();
36 return mapHrnIdToAllocSite.get(id);
39 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
42 checkAnalysisComplete();
43 ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
44 FlatMethod fm=state.getMethodFlat(taskOrMethod);
46 return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
49 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
50 int paramIndex, AllocSite alloc) {
51 checkAnalysisComplete();
52 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
53 FlatMethod fm=state.getMethodFlat(taskOrMethod);
55 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
58 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
59 AllocSite alloc, int paramIndex) {
60 checkAnalysisComplete();
61 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
62 FlatMethod fm=state.getMethodFlat(taskOrMethod);
64 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
67 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
68 AllocSite alloc1, AllocSite alloc2) {
69 checkAnalysisComplete();
70 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
72 return rg.mayReachSharedObjects(alloc1, alloc2);
75 public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
76 checkAnalysisComplete();
80 Iterator<HeapRegionNode> i = s.iterator();
82 HeapRegionNode n = i.next();
84 AllocSite as = n.getAllocSite();
86 out += " " + n.toString() + ",\n";
88 out += " " + n.toString() + ": " + as.toStringVerbose()
// use the methods given above to check every possible sharing class
// between task parameters and flagged allocation sites reachable
// from each task
  // Writes the full sharing report for task-based (Bamboo) programs:
  // for every task it checks parameter-vs-parameter, parameter-vs-
  // allocation-site, and allocation-site-vs-allocation-site sharing,
  // writing human-readable or tabular (LaTeX-style) output.
  //
  // NOTE(review): this fragment is missing several original lines --
  // part of the parameter list (a time report string, justTime, and a
  // numLines parameter are used below but not declared here), the
  // declaration/increment of numSharing, and many closing braces.
  // Recover them from version control before compiling.
  public void writeAllSharing(String outputFile,
                              boolean tabularOutput,
    throws java.io.IOException {
    checkAnalysisComplete();

    BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));

    if (!tabularOutput) {
      bw.write("Conducting ownership analysis with allocation depth = "
               + allocationDepth + "\n");
      bw.write(timeReport + "\n");

    // look through every task for potential sharing
    Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
    while (taskItr.hasNext()) {
      TaskDescriptor td = (TaskDescriptor) taskItr.next();

      if (!tabularOutput) {
        bw.write("\n---------" + td + "--------\n");

      HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);

      Set<HeapRegionNode> common;

      // for each task parameter, check for sharing classes with
      // other task parameters and every allocation site
      // reachable from this task
      boolean foundSomeSharing = false;

      FlatMethod fm = state.getMethodFlat(td);
      for (int i = 0; i < fm.numParameters(); ++i) {

        // skip parameters with types that cannot reference
        if( !shouldAnalysisTrack( fm.getParameter( i ).getType() ) ) {

        // for the ith parameter check for sharing classes to all
        // higher numbered parameters
        for (int j = i + 1; j < fm.numParameters(); ++j) {

          // skip parameters with types that cannot reference
          if( !shouldAnalysisTrack( fm.getParameter( j ).getType() ) ) {

          common = hasPotentialSharing(td, i, j);
          if (!common.isEmpty()) {
            foundSomeSharing = true;

            if (!tabularOutput) {
              bw.write("Potential sharing between parameters " + i
                       + " and " + j + ".\n");
              bw.write(prettyPrintNodeSet(common) + "\n");

        // for the ith parameter, check for sharing classes against
        // the set of allocation sites reachable from this task
        Iterator allocItr = allocSites.iterator();
        while (allocItr.hasNext()) {
          AllocSite as = (AllocSite) allocItr.next();
          common = hasPotentialSharing(td, i, as);
          if (!common.isEmpty()) {
            foundSomeSharing = true;

            if (!tabularOutput) {
              bw.write("Potential sharing between parameter " + i
                       + " and " + as.getFlatNew() + ".\n");
              bw.write(prettyPrintNodeSet(common) + "\n");

      // for each allocation site check for sharing classes with
      // other allocation sites in the context of execution
      HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
      Iterator allocItr1 = allocSites.iterator();
      while (allocItr1.hasNext()) {
        AllocSite as1 = (AllocSite) allocItr1.next();

        Iterator allocItr2 = allocSites.iterator();
        while (allocItr2.hasNext()) {
          AllocSite as2 = (AllocSite) allocItr2.next();

          // outerChecked prevents reporting each unordered pair twice
          if (!outerChecked.contains(as2)) {
            common = hasPotentialSharing(td, as1, as2);

            if (!common.isEmpty()) {
              foundSomeSharing = true;

              if (!tabularOutput) {
                bw.write("Potential sharing between "
                         + as1.getFlatNew() + " and "
                         + as2.getFlatNew() + ".\n");
                bw.write(prettyPrintNodeSet(common) + "\n");

        outerChecked.add(as1);

      if (!foundSomeSharing) {
        if (!tabularOutput) {
          bw.write("No sharing between flagged objects in Task " + td

    // tabular (LaTeX row) summary vs. plain-text summary
    bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
             + " & " + numMethodsAnalyzed() + " \\\\\n");

    bw.write("\nNumber sharing classes: "+numSharing);
  // this version of writeAllSharing is for Java programs that have no
  // tasks: it checks pairwise sharing over all flagged allocation sites
  // reachable from the program's main method.
  //
  // NOTE(review): this fragment is missing original lines -- part of
  // the parameter list (a time report string is used below but not
  // declared), the second argument of the hasPotentialSharing call,
  // the numSharing counter, and several closing braces. Recover them
  // from version control before compiling.
  public void writeAllSharingJava(String outputFile,
                                  boolean tabularOutput,
    throws java.io.IOException {
    checkAnalysisComplete();

    BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));

    bw.write("Conducting disjoint reachability analysis with allocation depth = "
             + allocationDepth + "\n");
    bw.write(timeReport + "\n\n");

    boolean foundSomeSharing = false;

    Descriptor d = typeUtil.getMain();
    HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);

    // for each allocation site check for sharing classes with
    // other allocation sites in the context of execution
    HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
    Iterator allocItr1 = allocSites.iterator();
    while (allocItr1.hasNext()) {
      AllocSite as1 = (AllocSite) allocItr1.next();

      Iterator allocItr2 = allocSites.iterator();
      while (allocItr2.hasNext()) {
        AllocSite as2 = (AllocSite) allocItr2.next();

        // outerChecked prevents reporting each unordered pair twice
        if (!outerChecked.contains(as2)) {
          Set<HeapRegionNode> common = hasPotentialSharing(d,
          if (!common.isEmpty()) {
            foundSomeSharing = true;
            bw.write("Potential sharing between "
                     + as1.getDisjointAnalysisId() + " and "
                     + as2.getDisjointAnalysisId() + ".\n");
            bw.write(prettyPrintNodeSet(common) + "\n");

      outerChecked.add(as1);

    if (!foundSomeSharing) {
      bw.write("No sharing classes between flagged objects found.\n");

    bw.write("\nNumber sharing classes: "+numSharing);
    bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
304 ///////////////////////////////////////////
306 // end public interface
308 ///////////////////////////////////////////
310 protected void checkAnalysisComplete() {
311 if( !analysisComplete ) {
312 throw new Error("Warning: public interface method called while analysis is running.");
  // run in faster mode, only when bugs wrung out!
  public static boolean releaseMode;

  // use command line option to set this, analysis
  // should attempt to be deterministic
  public static boolean determinismDesired;

  // when we want to enforce determinism in the
  // analysis we need to sort descriptors rather
  // than toss them in efficient sets, use this
  public static DescriptorComparator dComp =
    new DescriptorComparator();

  // data from the compiler
  public CallGraph callGraph;
  public Liveness liveness;
  public ArrayReferencees arrayReferencees;
  public RBlockRelationAnalysis rblockRel;
  public TypeUtil typeUtil;
  public int allocationDepth;

  // effects analysis is only enabled when a non-null rblock
  // relation is passed to init (see init below)
  protected boolean doEffectsAnalysis = false;
  protected EffectsAnalysis effectsAnalysis;

  // data structure for public interface
  private Hashtable< Descriptor, HashSet<AllocSite> >
  mapDescriptorToAllocSiteSet;

  // for public interface methods to warn that they
  // are grabbing results during analysis
  private boolean analysisComplete;

  // used to identify HeapRegionNode objects:
  // a unique ID equates an object in one
  // ownership graph with an object in another
  // graph that logically represents the same object;
  // start at 10 and increment to reserve some
  // IDs for special purposes
  static protected int uniqueIDcount = 10;

  // An out-of-scope method created by the
  // analysis that has no parameters, and
  // appears to allocate the command line
  // arguments, then invoke the source code's
  // main method. The purpose of this is to
  // provide the analysis with an explicit
  // top-level context with no parameters
  protected MethodDescriptor mdAnalysisEntry;
  protected FlatMethod fmAnalysisEntry;

  // main method defined by source program
  protected MethodDescriptor mdSourceEntry;

  // the set of task and/or method descriptors
  // reachable in call graph
  protected Set<Descriptor>
  descriptorsToAnalyze;

  // current descriptors to visit in fixed-point
  // interprocedural analysis, prioritized by
  // dependency in the call graph
  protected Stack<Descriptor>
  descriptorsToVisitStack;
  // NOTE(review): the field-name line is missing from this fragment;
  // the field is referenced elsewhere as descriptorsToVisitQ.
  protected PriorityQueue<DescriptorQWrapper>

  // a duplication of the above structure, but
  // for efficient testing of inclusion
  protected HashSet<Descriptor>
  descriptorsToVisitSet;

  // storage for priorities (it doesn't make sense
  // to add a priority field to the Descriptor class
  // just for this analysis)
  protected Hashtable<Descriptor, Integer>
  mapDescriptorToPriority;

  // when analyzing a method and scheduling more:
  // remember set of callee's enqueued for analysis
  // so they can be put on top of the callers in
  // the stack-visit mode
  // NOTE(review): the field-name line is missing from this fragment;
  // the field is referenced elsewhere as calleesToEnqueue.
  protected Set<Descriptor>

  // maps a descriptor to its current partial result
  // from the intraprocedural fixed-point analysis--
  // when the interprocedural analysis settles, this
  // mapping will have the final results for each
  protected Hashtable<Descriptor, ReachGraph>
  mapDescriptorToCompleteReachGraph;

  // maps a descriptor to its known dependents: namely
  // methods or tasks that call the descriptor's method
  // AND are part of this analysis (reachable from main)
  protected Hashtable< Descriptor, Set<Descriptor> >
  mapDescriptorToSetDependents;

  // maps each flat new to one analysis abstraction
  // allocate site object, these exist outside reach graphs
  protected Hashtable<FlatNew, AllocSite>
  mapFlatNewToAllocSite;

  // maps intergraph heap region IDs to intergraph
  // allocation sites that created them, a redundant
  // structure for efficiency in some operations
  // NOTE(review): the field-name line is missing from this fragment;
  // the field is referenced elsewhere as mapHrnIdToAllocSite.
  protected Hashtable<Integer, AllocSite>

  // maps a method to its initial heap model (IHM) that
  // is the set of reachability graphs from every caller
  // site, all merged together. The reason that we keep
  // them separate is that any one call site's contribution
  // to the IHM may change along the path to the fixed point
  protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
  mapDescriptorToIHMcontributions;

  // additionally, keep a mapping from descriptors to the
  // merged in-coming initial context, because we want this
  // initial context to be STRICTLY MONOTONIC
  protected Hashtable<Descriptor, ReachGraph>
  mapDescriptorToInitialContext;

  // make the result for back edges analysis-wide STRICTLY
  // MONOTONIC as well, but notice we use FlatNode as the
  // key for this map: in case we want to consider other
  // nodes as back edge's in future implementations
  protected Hashtable<FlatNode, ReachGraph>
  mapBackEdgeToMonotone;

  // name of the fabricated field used to model array element
  // references as ordinary field accesses
  public static final String arrayElementFieldName = "___element_";
  // NOTE(review): the field-name line is missing from this fragment;
  // the field is referenced elsewhere as mapTypeToArrayField.
  static protected Hashtable<TypeDescriptor, FieldDescriptor>

  // for controlling DOT file output
  protected boolean writeFinalDOTs;
  protected boolean writeAllIncrementalDOTs;

  // supporting DOT output--when we want to write every
  // partial method result, keep a tally for generating
  // distinct output names per method
  protected Hashtable<Descriptor, Integer>
  mapDescriptorToNumUpdates;

  //map task descriptor to initial task parameter
  protected Hashtable<Descriptor, ReachGraph>
  mapDescriptorToReachGraph;

  protected PointerMethod pm;

  static protected Hashtable<FlatNode, ReachGraph> fn2rg =
    new Hashtable<FlatNode, ReachGraph>();

  private Hashtable<FlatCall, Descriptor> fc2enclosing;
  // allocate various structures that are not local
  // to a single class method--should be done once
  // before the interprocedural fixed point begins.
  //
  // NOTE(review): this fragment is missing original lines -- the
  // `} else {` between the TreeSet and HashSet assignments, the
  // `calleesToEnqueue =` left-hand side before the bare
  // `new HashSet<Descriptor>();` below, and several closing braces.
  protected void allocateStructures() {

    if( determinismDesired ) {
      // use an ordered set
      descriptorsToAnalyze = new TreeSet<Descriptor>( dComp );
      // otherwise use a speedy hashset
      descriptorsToAnalyze = new HashSet<Descriptor>();

    mapDescriptorToCompleteReachGraph =
      new Hashtable<Descriptor, ReachGraph>();

    mapDescriptorToNumUpdates =
      new Hashtable<Descriptor, Integer>();

    mapDescriptorToSetDependents =
      new Hashtable< Descriptor, Set<Descriptor> >();

    mapFlatNewToAllocSite =
      new Hashtable<FlatNew, AllocSite>();

    mapDescriptorToIHMcontributions =
      new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();

    mapDescriptorToInitialContext =
      new Hashtable<Descriptor, ReachGraph>();

    mapBackEdgeToMonotone =
      new Hashtable<FlatNode, ReachGraph>();

    mapHrnIdToAllocSite =
      new Hashtable<Integer, AllocSite>();

    mapTypeToArrayField =
      new Hashtable <TypeDescriptor, FieldDescriptor>();

    // only the structure matching the configured visiting
    // mode is allocated
    if( state.DISJOINTDVISITSTACK ||
        state.DISJOINTDVISITSTACKEESONTOP
      descriptorsToVisitStack =
        new Stack<Descriptor>();

    if( state.DISJOINTDVISITPQUE ) {
      descriptorsToVisitQ =
        new PriorityQueue<DescriptorQWrapper>();

    descriptorsToVisitSet =
      new HashSet<Descriptor>();

    mapDescriptorToPriority =
      new Hashtable<Descriptor, Integer>();

    // NOTE(review): assignment target line missing here; presumably
    // `calleesToEnqueue =` -- confirm against version control
    new HashSet<Descriptor>();

    mapDescriptorToAllocSiteSet =
      new Hashtable<Descriptor, HashSet<AllocSite> >();

    mapDescriptorToReachGraph =
      new Hashtable<Descriptor, ReachGraph>();

    pm = new PointerMethod();

    fc2enclosing = new Hashtable<FlatCall, Descriptor>();
  // this analysis generates a disjoint reachability
  // graph for every reachable method in the program
  //
  // NOTE(review): the parameter-declaration lines between the State and
  // RBlockRelationAnalysis parameters are missing from this fragment;
  // judging from the init call below they declare a TypeUtil (tu), a
  // CallGraph (cg), a Liveness (l), and an ArrayReferencees (ar) --
  // confirm against version control.
  public DisjointAnalysis( State s,
                           RBlockRelationAnalysis rra
    init( s, tu, cg, l, ar, rra );
  // Wires in compiler data, configures static ReachGraph options and
  // debug-snapshot settings, allocates shared structures, runs the
  // interprocedural fixed point, and writes any requested reports.
  //
  // NOTE(review): this fragment is missing original lines -- several
  // parameter declarations (a TypeUtil, CallGraph and Liveness are
  // assigned below but not declared here), the try-block openers that
  // match the two catch clauses, the calls guarded by the DOT/IHM
  // conditionals, and many closing braces.
  protected void init( State state,
                       ArrayReferencees arrayReferencees,
                       RBlockRelationAnalysis rra

    analysisComplete = false;

    this.typeUtil = typeUtil;
    this.callGraph = callGraph;
    this.liveness = liveness;
    this.arrayReferencees = arrayReferencees;
    this.rblockRel = rra;

    // a non-null rblock relation enables the effects analysis
    if( rblockRel != null ) {
      doEffectsAnalysis = true;
      effectsAnalysis = new EffectsAnalysis();

    this.allocationDepth = state.DISJOINTALLOCDEPTH;
    this.releaseMode = state.DISJOINTRELEASEMODE;
    this.determinismDesired = state.DISJOINTDETERMINISM;

    this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL;
    this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL;

    // debug-snapshot configuration, all driven by command-line state
    this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
    this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
    this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
    this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
    this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
    this.snapVisitCounter = 1; // count visits from 1 (user will write 1, means 1st visit)
    this.snapNodeCounter = 0; // count nodes from 0

    // exactly one descriptor-visiting mode must be selected
    // NOTE(review): the `assert` keyword line for this first check
    // appears to be missing in this fragment
    state.DISJOINTDVISITSTACK ||
    state.DISJOINTDVISITPQUE ||
    state.DISJOINTDVISITSTACKEESONTOP;
    assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
    assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
    assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);

    // set some static configuration for ReachGraphs
    ReachGraph.allocationDepth = allocationDepth;
    ReachGraph.typeUtil = typeUtil;

    ReachGraph.debugCallSiteVisitStartCapture
      = state.DISJOINTDEBUGCALLVISITTOSTART;

    ReachGraph.debugCallSiteNumVisitsToCapture
      = state.DISJOINTDEBUGCALLNUMVISITS;

    ReachGraph.debugCallSiteStopAfter
      = state.DISJOINTDEBUGCALLSTOPAFTER;

    ReachGraph.debugCallSiteVisitCounter
      = 0; // count visits from 1, is incremented before first visit

    allocateStructures();

    double timeStartAnalysis = (double) System.nanoTime();

    // start interprocedural fixed-point computation
    } catch( IOException e ) {
      throw new Error( "IO Exception while writing disjointness analysis output." );

    analysisComplete=true;

    double timeEndAnalysis = (double) System.nanoTime();
    double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow( 10.0, 9.0 ) );
    String treport = String.format( "The reachability analysis took %.3f sec.", dt );
    String justtime = String.format( "%.2f", dt );
    System.out.println( treport );

    if( writeFinalDOTs && !writeAllIncrementalDOTs ) {

    if( state.DISJOINTWRITEIHMS ) {

    if( state.DISJOINTWRITEINITCONTEXTS ) {
      writeInitialContexts();

    if( state.DISJOINTALIASFILE != null ) {
      // task mode writes the task report, java mode the java report
      writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
      writeAllSharingJava(state.DISJOINTALIASFILE,
                          state.DISJOINTALIASTAB,

    } catch( IOException e ) {
      throw new Error( "IO Exception while writing disjointness analysis output." );

    if( doEffectsAnalysis ) {
      effectsAnalysis.writeEffectsPerMethodAndRBlock( "effects.txt" );
682 protected boolean moreDescriptorsToVisit() {
683 if( state.DISJOINTDVISITSTACK ||
684 state.DISJOINTDVISITSTACKEESONTOP
686 return !descriptorsToVisitStack.isEmpty();
688 } else if( state.DISJOINTDVISITPQUE ) {
689 return !descriptorsToVisitQ.isEmpty();
692 throw new Error( "Neither descriptor visiting mode set" );
  // fixed-point computation over the call graph--when a
  // method's callees are updated, it must be reanalyzed
  //
  // NOTE(review): this fragment is missing original lines -- the
  // task-mode/java-mode branching structure, the declarations of the
  // priority counter `p` and the loop-local Descriptor `d`, the
  // enqueue calls inside the dependent-scheduling loops, and many
  // closing braces. Recover them from version control before compiling.
  protected void analyzeMethods() throws java.io.IOException {

    // task or non-task (java) mode determines what the roots
    // of the call chain are, and establishes the set of methods
    // reachable from the roots that will be analyzed

    System.out.println( "Bamboo mode..." );

    Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
    while( taskItr.hasNext() ) {
      TaskDescriptor td = (TaskDescriptor) taskItr.next();
      if( !descriptorsToAnalyze.contains( td ) ) {
        // add all methods transitively reachable from the
        // task to the set for analysis
        descriptorsToAnalyze.add( td );
        descriptorsToAnalyze.addAll( callGraph.getAllMethods( td ) );

    System.out.println( "Java mode..." );

    // add all methods transitively reachable from the
    // source's main to set for analysis
    mdSourceEntry = typeUtil.getMain();
    descriptorsToAnalyze.add( mdSourceEntry );
    descriptorsToAnalyze.addAll( callGraph.getAllMethods( mdSourceEntry ) );

    // fabricate an empty calling context that will call
    // the source's main, but call graph doesn't know
    // about it, so explicitly add it
    makeAnalysisEntryMethod( mdSourceEntry );
    descriptorsToAnalyze.add( mdAnalysisEntry );

    // now, depending on the interprocedural mode for visiting
    // methods, set up the needed data structures

    if( state.DISJOINTDVISITPQUE ) {

      // topologically sort according to the call graph so
      // leaf calls are last, helps build contexts up first
      LinkedList<Descriptor> sortedDescriptors =
        topologicalSort( descriptorsToAnalyze );

      // add sorted descriptors to priority queue, and duplicate
      // the queue as a set for efficiently testing whether some
      // method is marked for analysis

      Iterator<Descriptor> dItr;

      // for the priority queue, give items at the head
      // of the sorted list a low number (highest priority)
      while( !sortedDescriptors.isEmpty() ) {
        Descriptor d = sortedDescriptors.removeFirst();
        mapDescriptorToPriority.put( d, new Integer( p ) );
        descriptorsToVisitQ.add( new DescriptorQWrapper( p, d ) );
        descriptorsToVisitSet.add( d );

    } else if( state.DISJOINTDVISITSTACK ||
               state.DISJOINTDVISITSTACKEESONTOP

      // if we're doing the stack scheme, just throw the root
      // method or tasks on the stack

      Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
      while( taskItr.hasNext() ) {
        TaskDescriptor td = (TaskDescriptor) taskItr.next();
        descriptorsToVisitStack.add( td );
        descriptorsToVisitSet.add( td );

      descriptorsToVisitStack.add( mdAnalysisEntry );
      descriptorsToVisitSet.add( mdAnalysisEntry );

      throw new Error( "Unknown method scheduling mode" );

    // analyze scheduled methods until there are no more to visit
    while( moreDescriptorsToVisit() ) {

      if( state.DISJOINTDVISITSTACK ||
          state.DISJOINTDVISITSTACKEESONTOP

        d = descriptorsToVisitStack.pop();

      } else if( state.DISJOINTDVISITPQUE ) {
        d = descriptorsToVisitQ.poll().getDescriptor();

      assert descriptorsToVisitSet.contains( d );
      descriptorsToVisitSet.remove( d );

      // because the task or method descriptor just extracted
      // was in the "to visit" set it either hasn't been analyzed
      // yet, or some method that it depends on has been
      // updated. Recompute a complete reachability graph for
      // this task/method and compare it to any previous result.
      // If there is a change detected, add any methods/tasks
      // that depend on this one to the "to visit" set.

      System.out.println( "Analyzing " + d );

      if( state.DISJOINTDVISITSTACKEESONTOP ) {
        assert calleesToEnqueue.isEmpty();

      ReachGraph rg = analyzeMethod( d );
      ReachGraph rgPrev = getPartial( d );

      if( !rg.equals( rgPrev ) ) {

        if( state.DISJOINTDEBUGSCHEDULING ) {
          System.out.println( " complete graph changed, scheduling callers for analysis:" );

        // results for d changed, so enqueue dependents
        // of d for further analysis
        Iterator<Descriptor> depsItr = getDependents( d ).iterator();
        while( depsItr.hasNext() ) {
          Descriptor dNext = depsItr.next();

          if( state.DISJOINTDEBUGSCHEDULING ) {
            System.out.println( " "+dNext );

      // whether or not the method under analysis changed,
      // we may have some callees that are scheduled for
      // more analysis, and they should go on the top of
      // the stack now (in other method-visiting modes they
      // are already enqueued at this point
      if( state.DISJOINTDVISITSTACKEESONTOP ) {
        Iterator<Descriptor> depsItr = calleesToEnqueue.iterator();
        while( depsItr.hasNext() ) {
          Descriptor dNext = depsItr.next();

        calleesToEnqueue.clear();
  // Intraprocedural fixed point for one task or method: iterates the
  // transfer function (analyzeFlatNode) over the flat IR nodes until
  // the per-node graphs stabilize, then merges all return-node graphs
  // into the method's complete result.
  //
  // NOTE(review): this fragment is missing original lines -- the
  // declarations of `fm` and `fn`, the else-branches pairing with
  // several visible if-statements, and many closing braces. Recover
  // them from version control before compiling.
  protected ReachGraph analyzeMethod( Descriptor d )
    throws java.io.IOException {

    // get the flat code for this descriptor
    if( d == mdAnalysisEntry ) {
      fm = fmAnalysisEntry;
      fm = state.getMethodFlat( d );

    pm.analyzeMethod( fm );

    // intraprocedural work set
    Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
    flatNodesToVisit.add( fm );

    // if determinism is desired by client, shadow the
    // set with a queue to make visit order deterministic
    Queue<FlatNode> flatNodesToVisitQ = null;
    if( determinismDesired ) {
      flatNodesToVisitQ = new LinkedList<FlatNode>();
      flatNodesToVisitQ.add( fm );

    // mapping of current partial results
    Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph =
      new Hashtable<FlatNode, ReachGraph>();

    // the set of return nodes partial results that will be combined as
    // the final, conservative approximation of the entire method
    HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();

    while( !flatNodesToVisit.isEmpty() ) {

      // take from the queue in deterministic mode, from the
      // set otherwise
      if( determinismDesired ) {
        assert !flatNodesToVisitQ.isEmpty();
        fn = flatNodesToVisitQ.remove();
        fn = flatNodesToVisit.iterator().next();

      flatNodesToVisit.remove( fn );

      // effect transfer function defined by this node,
      // then compare it to the old graph at this node
      // to see if anything was updated.

      ReachGraph rg = new ReachGraph();
      TaskDescriptor taskDesc;
      if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null){
        if(mapDescriptorToReachGraph.containsKey(taskDesc)){
          // retrieve existing reach graph if it is not first time
          rg=mapDescriptorToReachGraph.get(taskDesc);
          // create initial reach graph for a task
          rg=createInitialTaskReachGraph((FlatMethod)fn);

        mapDescriptorToReachGraph.put(taskDesc, rg);

      // start by merging all node's parents' graphs
      for( int i = 0; i < pm.numPrev(fn); ++i ) {
        FlatNode pn = pm.getPrev(fn,i);
        if( mapFlatNodeToReachGraph.containsKey( pn ) ) {
          ReachGraph rgParent = mapFlatNodeToReachGraph.get( pn );
          rg.merge( rgParent );

      if( takeDebugSnapshots &&
          d.getSymbol().equals( descSymbolDebug )
        debugSnapshot( rg, fn, true );

      // modify rg with appropriate transfer function
      rg = analyzeFlatNode( d, fm, fn, setReturns, rg );

      if( takeDebugSnapshots &&
          d.getSymbol().equals( descSymbolDebug )
        debugSnapshot( rg, fn, false );

      // if the results of the new graph are different from
      // the current graph at this node, replace the graph
      // with the update and enqueue the children
      ReachGraph rgPrev = mapFlatNodeToReachGraph.get( fn );
      if( !rg.equals( rgPrev ) ) {
        mapFlatNodeToReachGraph.put( fn, rg );

        for( int i = 0; i < pm.numNext( fn ); i++ ) {
          FlatNode nn = pm.getNext( fn, i );

          flatNodesToVisit.add( nn );
          if( determinismDesired ) {
            flatNodesToVisitQ.add( nn );

    // end by merging all return nodes into a complete
    // reach graph that represents all possible heap
    // states after the flat method returns
    ReachGraph completeGraph = new ReachGraph();

    assert !setReturns.isEmpty();
    Iterator retItr = setReturns.iterator();
    while( retItr.hasNext() ) {
      FlatReturnNode frn = (FlatReturnNode) retItr.next();

      assert mapFlatNodeToReachGraph.containsKey( frn );
      ReachGraph rgRet = mapFlatNodeToReachGraph.get( frn );

      completeGraph.merge( rgRet );

    if( takeDebugSnapshots &&
        d.getSymbol().equals( descSymbolDebug )

      // increment that we've visited the debug snap
      // method, and reset the node counter
      System.out.println( " @@@ debug snap at visit "+snapVisitCounter );

      if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&

        System.out.println( "!!! Stopping analysis after debug snap captures. !!!" );

    return completeGraph;
1002 protected ReachGraph
1003 analyzeFlatNode( Descriptor d,
1004 FlatMethod fmContaining,
1006 HashSet<FlatReturnNode> setRetNodes,
1008 ) throws java.io.IOException {
1011 // any variables that are no longer live should be
1012 // nullified in the graph to reduce edges
1013 //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );
1016 if( doEffectsAnalysis && && fmContaining != fmAnalysisEntry
1017 rra.isEndOfRegion(fn)){
1018 rg.clearAccessibleVarSet();
1019 also need to clear stall mapping
1025 FieldDescriptor fld;
1027 // use node type to decide what transfer function
1028 // to apply to the reachability graph
1029 switch( fn.kind() ) {
1031 case FKind.FlatMethod: {
1032 // construct this method's initial heap model (IHM)
1033 // since we're working on the FlatMethod, we know
1034 // the incoming ReachGraph 'rg' is empty
1036 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1037 getIHMcontributions( d );
1039 Set entrySet = heapsFromCallers.entrySet();
1040 Iterator itr = entrySet.iterator();
1041 while( itr.hasNext() ) {
1042 Map.Entry me = (Map.Entry) itr.next();
1043 FlatCall fc = (FlatCall) me.getKey();
1044 ReachGraph rgContrib = (ReachGraph) me.getValue();
1046 assert fc.getMethod().equals( d );
1048 rg.merge( rgContrib );
1051 // additionally, we are enforcing STRICT MONOTONICITY for the
1052 // method's initial context, so grow the context by whatever
1053 // the previously computed context was, and put the most
1054 // up-to-date context back in the map
1055 ReachGraph rgPrevContext = mapDescriptorToInitialContext.get( d );
1056 rg.merge( rgPrevContext );
1057 mapDescriptorToInitialContext.put( d, rg );
1061 case FKind.FlatOpNode:
1062 FlatOpNode fon = (FlatOpNode) fn;
1063 if( fon.getOp().getOp() == Operation.ASSIGN ) {
1064 lhs = fon.getDest();
1065 rhs = fon.getLeft();
1066 rg.assignTempXEqualToTempY( lhs, rhs );
1070 case FKind.FlatCastNode:
1071 FlatCastNode fcn = (FlatCastNode) fn;
1075 TypeDescriptor td = fcn.getType();
1078 rg.assignTempXEqualToCastedTempY( lhs, rhs, td );
1081 case FKind.FlatFieldNode:
1082 FlatFieldNode ffn = (FlatFieldNode) fn;
1085 fld = ffn.getField();
1086 if( shouldAnalysisTrack( fld.getType() ) ) {
1087 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fld );
1089 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1090 FlatSESEEnterNode seseContaining =
1091 rblockRel.getRBlockStacks( fmContaining, fn ).peek();
1093 effectsAnalysis.analyzeFlatFieldNode( fmContaining,
1100 case FKind.FlatSetFieldNode:
1101 FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;
1102 lhs = fsfn.getDst();
1103 fld = fsfn.getField();
1104 rhs = fsfn.getSrc();
1106 if( shouldAnalysisTrack( fld.getType() ) ) {
1107 boolean strongUpdate = rg.assignTempXFieldFEqualToTempY( lhs, fld, rhs );
1109 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1110 FlatSESEEnterNode seseContaining =
1111 rblockRel.getRBlockStacks( fmContaining, fn ).peek();
1113 effectsAnalysis.analyzeFlatSetFieldNode( fmContaining,
1115 rg, lhs, fld, strongUpdate );
1120 case FKind.FlatElementNode:
1121 FlatElementNode fen = (FlatElementNode) fn;
1124 if( shouldAnalysisTrack( lhs.getType() ) ) {
1126 assert rhs.getType() != null;
1127 assert rhs.getType().isArray();
1129 TypeDescriptor tdElement = rhs.getType().dereference();
1130 FieldDescriptor fdElement = getArrayField( tdElement );
1132 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fdElement );
1134 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1135 FlatSESEEnterNode seseContaining =
1136 rblockRel.getRBlockStacks( fmContaining, fn ).peek();
1138 effectsAnalysis.analyzeFlatFieldNode( fmContaining,
1140 rg, rhs, fdElement );
1145 case FKind.FlatSetElementNode:
1146 FlatSetElementNode fsen = (FlatSetElementNode) fn;
1148 if( arrayReferencees.doesNotCreateNewReaching( fsen ) ) {
1149 // skip this node if it cannot create new reachability paths
1153 lhs = fsen.getDst();
1154 rhs = fsen.getSrc();
1155 if( shouldAnalysisTrack( rhs.getType() ) ) {
1157 assert lhs.getType() != null;
1158 assert lhs.getType().isArray();
1160 TypeDescriptor tdElement = lhs.getType().dereference();
1161 FieldDescriptor fdElement = getArrayField( tdElement );
1163 rg.assignTempXFieldFEqualToTempY( lhs, fdElement, rhs );
1165 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1166 FlatSESEEnterNode seseContaining =
1167 rblockRel.getRBlockStacks( fmContaining, fn ).peek();
1169 effectsAnalysis.analyzeFlatSetFieldNode( fmContaining,
1178 FlatNew fnn = (FlatNew) fn;
1180 if( shouldAnalysisTrack( lhs.getType() ) ) {
1181 AllocSite as = getAllocSiteFromFlatNewPRIVATE( fnn );
1182 rg.assignTempEqualToNewAlloc( lhs, as );
1186 case FKind.FlatSESEEnterNode:
1187 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1188 FlatSESEEnterNode sese = (FlatSESEEnterNode) fn;
1189 rg.taintInSetVars( sese );
1193 case FKind.FlatSESEExitNode:
1194 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1195 FlatSESEExitNode fsexn = (FlatSESEExitNode) fn;
1196 rg.removeInContextTaints( fsexn.getFlatEnter() );
1201 case FKind.FlatCall: {
1202 Descriptor mdCaller;
1203 if( fmContaining.getMethod() != null ){
1204 mdCaller = fmContaining.getMethod();
1206 mdCaller = fmContaining.getTask();
1208 FlatCall fc = (FlatCall) fn;
1209 MethodDescriptor mdCallee = fc.getMethod();
1210 FlatMethod fmCallee = state.getMethodFlat( mdCallee );
1213 boolean debugCallSite =
1214 mdCaller.getSymbol().equals( state.DISJOINTDEBUGCALLER ) &&
1215 mdCallee.getSymbol().equals( state.DISJOINTDEBUGCALLEE );
1217 boolean writeDebugDOTs = false;
1218 boolean stopAfter = false;
1219 if( debugCallSite ) {
1220 ++ReachGraph.debugCallSiteVisitCounter;
1221 System.out.println( " $$$ Debug call site visit "+
1222 ReachGraph.debugCallSiteVisitCounter+
1226 (ReachGraph.debugCallSiteVisitCounter >=
1227 ReachGraph.debugCallSiteVisitStartCapture) &&
1229 (ReachGraph.debugCallSiteVisitCounter <
1230 ReachGraph.debugCallSiteVisitStartCapture +
1231 ReachGraph.debugCallSiteNumVisitsToCapture)
1233 writeDebugDOTs = true;
1234 System.out.println( " $$$ Capturing this call site visit $$$" );
1235 if( ReachGraph.debugCallSiteStopAfter &&
1236 (ReachGraph.debugCallSiteVisitCounter ==
1237 ReachGraph.debugCallSiteVisitStartCapture +
1238 ReachGraph.debugCallSiteNumVisitsToCapture - 1)
1246 // calculate the heap this call site can reach--note this is
1247 // not used for the current call site transform, we are
1248 // grabbing this heap model for future analysis of the callees,
1249 // so if different results emerge we will return to this site
1250 ReachGraph heapForThisCall_old =
1251 getIHMcontribution( mdCallee, fc );
1253 // the computation of the callee-reachable heap
1254 // is useful for making the callee starting point
1255 // and for applying the call site transfer function
1256 Set<Integer> callerNodeIDsCopiedToCallee =
1257 new HashSet<Integer>();
1259 ReachGraph heapForThisCall_cur =
1260 rg.makeCalleeView( fc,
1262 callerNodeIDsCopiedToCallee,
1266 if( !heapForThisCall_cur.equals( heapForThisCall_old ) ) {
1267 // if heap at call site changed, update the contribution,
1268 // and reschedule the callee for analysis
1269 addIHMcontribution( mdCallee, fc, heapForThisCall_cur );
1271 // map a FlatCall to its enclosing method/task descriptor
1272 // so we can write that info out later
1273 fc2enclosing.put( fc, mdCaller );
1275 if( state.DISJOINTDEBUGSCHEDULING ) {
1276 System.out.println( " context changed, scheduling callee: "+mdCallee );
1279 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1280 calleesToEnqueue.add( mdCallee );
1282 enqueue( mdCallee );
1287 // the transformation for a call site should update the
1288 // current heap abstraction with any effects from the callee,
1289 // or if the method is virtual, the effects from any possible
1290 // callees, so find the set of callees...
1291 Set<MethodDescriptor> setPossibleCallees;
1292 if( determinismDesired ) {
1293 // use an ordered set
1294 setPossibleCallees = new TreeSet<MethodDescriptor>( dComp );
1296 // otherwise use a speedy hashset
1297 setPossibleCallees = new HashSet<MethodDescriptor>();
1300 if( mdCallee.isStatic() ) {
1301 setPossibleCallees.add( mdCallee );
1303 TypeDescriptor typeDesc = fc.getThis().getType();
1304 setPossibleCallees.addAll( callGraph.getMethods( mdCallee,
1309 ReachGraph rgMergeOfEffects = new ReachGraph();
1311 Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
1312 while( mdItr.hasNext() ) {
1313 MethodDescriptor mdPossible = mdItr.next();
1314 FlatMethod fmPossible = state.getMethodFlat( mdPossible );
1316 addDependent( mdPossible, // callee
1319 // don't alter the working graph (rg) until we compute a
1320 // result for every possible callee, merge them all together,
1321 // then set rg to that
1322 ReachGraph rgCopy = new ReachGraph();
1325 ReachGraph rgEffect = getPartial( mdPossible );
1327 if( rgEffect == null ) {
1328 // if this method has never been analyzed just schedule it
1329 // for analysis and skip over this call site for now
1330 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1331 calleesToEnqueue.add( mdPossible );
1333 enqueue( mdPossible );
1336 if( state.DISJOINTDEBUGSCHEDULING ) {
1337 System.out.println( " callee hasn't been analyzed, scheduling: "+mdPossible );
1342 // calculate the method call transform
1344 Hashtable<Taint, TaintSet> tCallee2tsCaller = null;
1346 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1347 tCallee2tsCaller = new Hashtable<Taint, TaintSet>();
1350 rgCopy.resolveMethodCall( fc,
1353 callerNodeIDsCopiedToCallee,
1358 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1360 FlatSESEEnterNode seseContaining =
1361 rblockRel.getRBlockStacks( fmContaining, fn ).peek();
1363 effectsAnalysis.analyzeFlatCall( fmContaining,
1370 rgMergeOfEffects.merge( rgCopy );
1375 System.out.println( "$$$ Exiting after requested captures of call site. $$$" );
1380 // now that we've taken care of building heap models for
1381 // callee analysis, finish this transformation
1382 rg = rgMergeOfEffects;
1386 case FKind.FlatReturnNode:
1387 FlatReturnNode frn = (FlatReturnNode) fn;
1388 rhs = frn.getReturnTemp();
1389 if( rhs != null && shouldAnalysisTrack( rhs.getType() ) ) {
1390 rg.assignReturnEqualToTemp( rhs );
1392 setRetNodes.add( frn );
1398 // dead variables were removed before the above transfer function
1399 // was applied, so eliminate heap regions and edges that are no
1400 // longer part of the abstractly-live heap graph, and sweep up
1401 // and reachability effects that are altered by the reduction
1402 //rg.abstractGarbageCollect();
1406 // back edges are strictly monotonic
1407 if( pm.isBackEdge( fn ) ) {
1408 ReachGraph rgPrevResult = mapBackEdgeToMonotone.get( fn );
1409 rg.merge( rgPrevResult );
1410 mapBackEdgeToMonotone.put( fn, rg );
1413 // at this point rg should be the correct update
1414 // by an above transfer function, or untouched if
1415 // the flat node type doesn't affect the heap
1421 // this method should generate integers strictly greater than zero!
1422 // special "shadow" regions are made from a heap region by negating
// its ID, so IDs must remain positive for shadow IDs to stay distinct.
// Returns a fresh heap-region-node ID derived from the static counter.
1424 static public Integer generateUniqueHeapRegionNodeID() {
// NOTE(review): the counter advance (e.g. ++uniqueIDcount) is not visible
// in this excerpt but must occur once per call -- confirm in full source.
1426 return new Integer( uniqueIDcount );
// Lazily create and cache the pseudo-field used to model references
// stored in arrays of the given element type; all element slots of an
// array are abstracted as this single field.
1431 static public FieldDescriptor getArrayField( TypeDescriptor tdElement ) {
1432 FieldDescriptor fdElement = mapTypeToArrayField.get( tdElement );
1433 if( fdElement == null ) {
// first request for this element type: fabricate a public field
// named by arrayElementFieldName and remember it for next time
1434 fdElement = new FieldDescriptor( new Modifiers( Modifiers.PUBLIC ),
1436 arrayElementFieldName,
1439 mapTypeToArrayField.put( tdElement, fdElement );
// Dump the final (fixed-point) reach graph of every analyzed
// method/task to a DOT file named "COMPLETE"+descriptor.
1446 private void writeFinalGraphs() {
1447 Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
1448 Iterator itr = entrySet.iterator();
1449 while( itr.hasNext() ) {
1450 Map.Entry me = (Map.Entry) itr.next();
1451 Descriptor d = (Descriptor) me.getKey();
1452 ReachGraph rg = (ReachGraph) me.getValue();
1454 rg.writeGraph( "COMPLETE"+d,
1455 true, // write labels (variables)
1456 true, // selectively hide intermediate temp vars
1457 true, // prune unreachable heap regions
1458 false, // hide reachability altogether
1459 true, // hide subset reachability states
1460 true, // hide predicates
1461 false ); // hide edge taints
// Dump every Initial Heap Model (IHM) contribution -- one DOT graph
// per (callee descriptor, call site) pair -- naming each file by the
// callee, the enclosing caller, and the FlatCall.
1465 private void writeFinalIHMs() {
1466 Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
1467 while( d2IHMsItr.hasNext() ) {
1468 Map.Entry me1 = (Map.Entry) d2IHMsItr.next();
1469 Descriptor d = (Descriptor) me1.getKey();
1470 Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>) me1.getValue();
1472 Iterator fc2rgItr = IHMs.entrySet().iterator();
1473 while( fc2rgItr.hasNext() ) {
1474 Map.Entry me2 = (Map.Entry) fc2rgItr.next();
1475 FlatCall fc = (FlatCall) me2.getKey();
1476 ReachGraph rg = (ReachGraph) me2.getValue();
1478 rg.writeGraph( "IHMPARTFOR"+d+"FROM"+fc2enclosing.get( fc )+fc,
1479 true, // write labels (variables)
1480 true, // selectively hide intermediate temp vars
// NOTE(review): the next two flag comments are swapped relative to the
// argument labeling used in writeFinalGraphs/writeInitialContexts
// (there the 3rd flag is "prune unreachable" and the 4th is "hide
// reachability") -- confirm which labeling matches writeGraph's signature.
1481 true, // hide reachability altogether
1482 true, // prune unreachable heap regions
1483 true, // hide subset reachability states
1484 false, // hide predicates
1485 true ); // hide edge taints
// Dump the initial analysis context (starting reach graph) of each
// descriptor to a DOT file named "INITIAL"+descriptor.
1490 private void writeInitialContexts() {
1491 Set entrySet = mapDescriptorToInitialContext.entrySet();
1492 Iterator itr = entrySet.iterator();
1493 while( itr.hasNext() ) {
1494 Map.Entry me = (Map.Entry) itr.next();
1495 Descriptor d = (Descriptor) me.getKey();
1496 ReachGraph rg = (ReachGraph) me.getValue();
1498 rg.writeGraph( "INITIAL"+d,
1499 true, // write labels (variables)
1500 true, // selectively hide intermediate temp vars
1501 true, // prune unreachable heap regions
1502 false, // hide all reachability
1503 true, // hide subset reachability states
1504 true, // hide predicates
1505 false );// hide edge taints
// Fetch the most recent (partial) analysis result for a method/task,
// or null if it has not been analyzed yet.
1510 protected ReachGraph getPartial( Descriptor d ) {
1511 return mapDescriptorToCompleteReachGraph.get( d );
// Record the latest analysis result for a method/task, optionally
// writing each successive partial result to a uniquely-numbered DOT
// file when writeAllIncrementalDOTs is enabled.
1514 protected void setPartial( Descriptor d, ReachGraph rg ) {
1515 mapDescriptorToCompleteReachGraph.put( d, rg );
1517 // when the flag for writing out every partial
1518 // result is set, we should spit out the graph,
1519 // but in order to give it a unique name we need
1520 // to track how many partial results for this
1521 // descriptor we've already written out
1522 if( writeAllIncrementalDOTs ) {
1523 if( !mapDescriptorToNumUpdates.containsKey( d ) ) {
1524 mapDescriptorToNumUpdates.put( d, new Integer( 0 ) );
1526 Integer n = mapDescriptorToNumUpdates.get( d );
// zero-padded counter keeps the DOT files in chronological order
1528 rg.writeGraph( d+"COMPLETE"+String.format( "%05d", n ),
1529 true, // write labels (variables)
1530 true, // selectively hide intermediate temp vars
1531 true, // prune unreachable heap regions
1532 false, // hide all reachability
1533 true, // hide subset reachability states
1534 false, // hide predicates
1535 false); // hide edge taints
1537 mapDescriptorToNumUpdates.put( d, n + 1 );
1543 // return just the allocation site associated with one FlatNew node
// (created lazily: the first request for a FlatNew builds its AllocSite,
// assigns unique heap-region IDs for each age position plus a summary
// node, and registers them in mapHrnIdToAllocSite)
1544 protected AllocSite getAllocSiteFromFlatNewPRIVATE( FlatNew fnew ) {
1546 if( !mapFlatNewToAllocSite.containsKey( fnew ) ) {
1547 AllocSite as = AllocSite.factory( allocationDepth,
1549 fnew.getDisjointId(),
1553 // the newest nodes are single objects
1554 for( int i = 0; i < allocationDepth; ++i ) {
1555 Integer id = generateUniqueHeapRegionNodeID();
1556 as.setIthOldest( i, id );
1557 mapHrnIdToAllocSite.put( id, as );
1560 // the oldest node is a summary node
1561 as.setSummary( generateUniqueHeapRegionNodeID() );
1563 mapFlatNewToAllocSite.put( fnew, as );
1566 return mapFlatNewToAllocSite.get( fnew );
// Decide whether a value of the given type lives in heap memory the
// analysis must model.  Immutable (primitive) types are only tracked
// when they are arrays, since the array itself is a heap object.
1570 public static boolean shouldAnalysisTrack( TypeDescriptor type ) {
1571 // don't track primitive types, but an array
1572 // of primitives is heap memory
1573 if( type.isImmutable() ) {
1574 return type.isArray();
1577 // everything else is an object
// Number of descriptors (methods/tasks) in the analysis scope.
1581 protected int numMethodsAnalyzed() {
1582 return descriptorsToAnalyze.size();
1589 // Take in source entry which is the program's compiled entry and
1590 // create a new analysis entry, a method that takes no parameters
1591 // and appears to allocate the command line arguments and call the
1592 // source entry with them. The purpose of this analysis entry is
1593 // to provide a top-level method context with no parameters left.
// Sets this.mdAnalysisEntry and this.fmAnalysisEntry as side effects.
1594 protected void makeAnalysisEntryMethod( MethodDescriptor mdSourceEntry ) {
1596 Modifiers mods = new Modifiers();
1597 mods.addModifier( Modifiers.PUBLIC );
1598 mods.addModifier( Modifiers.STATIC );
1600 TypeDescriptor returnType =
1601 new TypeDescriptor( TypeDescriptor.VOID );
1603 this.mdAnalysisEntry =
1604 new MethodDescriptor( mods,
1606 "analysisEntryMethod"
// fabricate a temp holding the command-line args, typed like the
// source entry's single parameter (e.g. String[])
1609 TempDescriptor cmdLineArgs =
1610 new TempDescriptor( "args",
1611 mdSourceEntry.getParamType( 0 )
1615 new FlatNew( mdSourceEntry.getParamType( 0 ),
// pass the fabricated args array as the sole argument
1620 TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
1621 sourceEntryArgs[0] = cmdLineArgs;
1624 new FlatCall( mdSourceEntry,
1630 FlatReturnNode frn = new FlatReturnNode( null );
1632 FlatExit fe = new FlatExit();
1634 this.fmAnalysisEntry =
1635 new FlatMethod( mdAnalysisEntry,
1639 this.fmAnalysisEntry.addNext( fn );
// Topologically sort the given descriptors by caller/callee order via
// DFS over the call graph; when determinismDesired is set, the visited
// set is ordered so runs are reproducible.
1646 protected LinkedList<Descriptor> topologicalSort( Set<Descriptor> toSort ) {
1648 Set<Descriptor> discovered;
1650 if( determinismDesired ) {
1651 // use an ordered set
1652 discovered = new TreeSet<Descriptor>( dComp );
1654 // otherwise use a speedy hashset
1655 discovered = new HashSet<Descriptor>();
1658 LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();
1660 Iterator<Descriptor> itr = toSort.iterator();
1661 while( itr.hasNext() ) {
1662 Descriptor d = itr.next();
1664 if( !discovered.contains( d ) ) {
1665 dfsVisit( d, toSort, sorted, discovered );
1672 // While we're doing DFS on call graph, remember
1673 // dependencies for efficient queuing of methods
1674 // during interprocedural analysis:
1676 // a dependent of a method decriptor d for this analysis is:
1677 // 1) a method or task that invokes d
1678 // 2) in the descriptorsToAnalyze set
// DFS helper for topologicalSort: visits d's callers (restricted to
// toSort), records callee->caller dependency edges along the way, and
// appends d to sorted after all its callers are placed.
1679 protected void dfsVisit( Descriptor d,
1680 Set <Descriptor> toSort,
1681 LinkedList<Descriptor> sorted,
1682 Set <Descriptor> discovered ) {
1683 discovered.add( d );
1685 // only methods have callers, tasks never do
1686 if( d instanceof MethodDescriptor ) {
1688 MethodDescriptor md = (MethodDescriptor) d;
1690 // the call graph is not aware that we have a fabricated
1691 // analysis entry that calls the program source's entry
1692 if( md == mdSourceEntry ) {
1693 if( !discovered.contains( mdAnalysisEntry ) ) {
1694 addDependent( mdSourceEntry, // callee
1695 mdAnalysisEntry // caller
1697 dfsVisit( mdAnalysisEntry, toSort, sorted, discovered );
1701 // otherwise call graph guides DFS
1702 Iterator itr = callGraph.getCallerSet( md ).iterator();
1703 while( itr.hasNext() ) {
1704 Descriptor dCaller = (Descriptor) itr.next();
1706 // only consider callers in the original set to analyze
1707 if( !toSort.contains( dCaller ) ) {
1711 if( !discovered.contains( dCaller ) ) {
1712 addDependent( md, // callee
1716 dfsVisit( dCaller, toSort, sorted, discovered );
1721 // for leaf-nodes last now!
1722 sorted.addLast( d );
// Schedule a descriptor for (re)analysis, choosing the work-list
// structure selected by compiler flags: a stack (plain or
// callees-on-top) or a priority queue keyed by topological priority.
// The membership set prevents duplicate entries in the work list.
1726 protected void enqueue( Descriptor d ) {
1728 if( !descriptorsToVisitSet.contains( d ) ) {
1730 if( state.DISJOINTDVISITSTACK ||
1731 state.DISJOINTDVISITSTACKEESONTOP
1733 descriptorsToVisitStack.add( d );
1735 } else if( state.DISJOINTDVISITPQUE ) {
1736 Integer priority = mapDescriptorToPriority.get( d );
1737 descriptorsToVisitQ.add( new DescriptorQWrapper( priority,
1742 descriptorsToVisitSet.add( d );
1747 // a dependent of a method decriptor d for this analysis is:
1748 // 1) a method or task that invokes d
1749 // 2) in the descriptorsToAnalyze set
// Register caller as a dependent of callee so the caller is re-enqueued
// whenever the callee's analysis result changes.
1750 protected void addDependent( Descriptor callee, Descriptor caller ) {
1751 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
1752 if( deps == null ) {
1753 deps = new HashSet<Descriptor>();
// NOTE(review): the deps.add( caller ) statement is not visible in this
// excerpt but must occur before the put -- confirm in full source.
1756 mapDescriptorToSetDependents.put( callee, deps );
// Fetch the dependents of a callee, lazily installing an empty set so
// callers never see null.
1759 protected Set<Descriptor> getDependents( Descriptor callee ) {
1760 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
1761 if( deps == null ) {
1762 deps = new HashSet<Descriptor>();
1763 mapDescriptorToSetDependents.put( callee, deps );
// Fetch the map of call-site -> heap contribution (Initial Heap Model
// pieces) for descriptor d, lazily installing an empty table so
// callers never see null.
1769 public Hashtable<FlatCall, ReachGraph> getIHMcontributions( Descriptor d ) {
1771 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1772 mapDescriptorToIHMcontributions.get( d );
1774 if( heapsFromCallers == null ) {
1775 heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
1776 mapDescriptorToIHMcontributions.put( d, heapsFromCallers );
1779 return heapsFromCallers;
// Fetch the heap contribution a single call site has made to
// descriptor d's Initial Heap Model (the elided branch handles the
// absent-key case before the final get).
1782 public ReachGraph getIHMcontribution( Descriptor d,
1785 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1786 getIHMcontributions( d );
1788 if( !heapsFromCallers.containsKey( fc ) ) {
1792 return heapsFromCallers.get( fc );
// Record (or overwrite) the heap contribution of call site fc to
// descriptor d's Initial Heap Model.
1796 public void addIHMcontribution( Descriptor d,
1800 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1801 getIHMcontributions( d );
1803 heapsFromCallers.put( fc, rg );
// Fabricate an allocation site that models a method/task parameter as
// if it were allocated locally: builds a synthetic FlatNew (with or
// without a "param..." disjoint-site ID, per the elided flag), then an
// AllocSite with fresh heap-region IDs for each age position plus a
// summary node, all registered in mapHrnIdToAllocSite.
1807 private AllocSite createParameterAllocSite( ReachGraph rg,
1808 TempDescriptor tempDesc,
1814 flatNew = new FlatNew( tempDesc.getType(), // type
1815 tempDesc, // param temp
1816 false, // global alloc?
1817 "param"+tempDesc // disjoint site ID string
1820 flatNew = new FlatNew( tempDesc.getType(), // type
1821 tempDesc, // param temp
1822 false, // global alloc?
1823 null // disjoint site ID string
1827 // create allocation site
1828 AllocSite as = AllocSite.factory( allocationDepth,
1830 flatNew.getDisjointId(),
1833 for (int i = 0; i < allocationDepth; ++i) {
1834 Integer id = generateUniqueHeapRegionNodeID();
1835 as.setIthOldest(i, id);
1836 mapHrnIdToAllocSite.put(id, as);
1838 // the oldest node is a summary node
1839 as.setSummary( generateUniqueHeapRegionNodeID() );
// Collect the fields of the given type that the analysis must track
// (reference/array-typed fields); immutable types contribute nothing.
// NOTE(review): method name has a typo ("Tobe"); left as-is because it
// is called elsewhere in this file.
1847 private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc){
1849 Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
1850 if(!typeDesc.isImmutable()){
1851 ClassDescriptor classDesc = typeDesc.getClassDesc();
1852 for (Iterator it = classDesc.getFields(); it.hasNext();) {
1853 FieldDescriptor field = (FieldDescriptor) it.next();
1854 TypeDescriptor fieldType = field.getType();
1855 if (shouldAnalysisTrack( fieldType )) {
1856 fieldSet.add(field);
// Model a multi-dimensional array field in the initial reach graph:
// builds (or reuses, via mapToExistingNode) one summary heap region per
// array dimension, chains them with array-element edges from srcHRN
// down to the innermost dimension, and -- when the element class has
// tracked fields -- adds a final summary node for the element objects.
// Records arrayEntryNode -> deepest node in 'map' and returns the
// outermost (entry) node.
// NOTE(review): method name has a typo ("Deimensional"); left as-is
// because it is called elsewhere in this file.
1864 private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha ){
1866 int dimCount=fd.getType().getArrayCount();
1867 HeapRegionNode prevNode=null;
1868 HeapRegionNode arrayEntryNode=null;
// walk from the outermost dimension (i==dimCount) inward
1869 for(int i=dimCount;i>0;i--){
1870 TypeDescriptor typeDesc=fd.getType().dereference();//hack to get instance of type desc
1871 typeDesc.setArrayCount(i);
1872 TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
1873 HeapRegionNode hrnSummary ;
1874 if(!mapToExistingNode.containsKey(typeDesc)){
// no summary node for this dimension's type yet: make an alloc
// site and a fresh summary heap region for it
1879 as = createParameterAllocSite(rg, tempDesc, false);
1881 // make a new reference to allocated node
1883 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
1884 false, // single object?
1886 false, // out-of-context?
1887 as.getType(), // type
1888 as, // allocation site
1889 alpha, // inherent reach
1890 alpha, // current reach
1891 ExistPredSet.factory(rg.predTrue), // predicates
1892 tempDesc.toString() // description
1894 rg.id2hrn.put(as.getSummary(),hrnSummary);
1896 mapToExistingNode.put(typeDesc, hrnSummary);
1898 hrnSummary=mapToExistingNode.get(typeDesc);
// first dimension hangs off srcHRN by the field name; deeper
// dimensions hang off the previous dimension's summary node by
// the array-element pseudo-field
1902 // make a new reference between new summary node and source
1903 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
1906 fd.getSymbol(), // field name
1908 ExistPredSet.factory(rg.predTrue), // predicates
1912 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
1913 prevNode=hrnSummary;
1914 arrayEntryNode=hrnSummary;
1916 // make a new reference between summary nodes of array
1917 RefEdge edgeToSummary = new RefEdge(prevNode, // source
1920 arrayElementFieldName, // field name
1922 ExistPredSet.factory(rg.predTrue), // predicates
1926 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
1927 prevNode=hrnSummary;
1932 // create a new obj node if obj has at least one non-primitive field
1933 TypeDescriptor type=fd.getType();
1934 if(getFieldSetTobeAnalyzed(type).size()>0){
1935 TypeDescriptor typeDesc=type.dereference();
1936 typeDesc.setArrayCount(0);
1937 if(!mapToExistingNode.containsKey(typeDesc)){
1938 TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
1939 AllocSite as = createParameterAllocSite(rg, tempDesc, false);
1940 // make a new reference to allocated node
1941 HeapRegionNode hrnSummary =
1942 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
1943 false, // single object?
1945 false, // out-of-context?
1947 as, // allocation site
1948 alpha, // inherent reach
1949 alpha, // current reach
1950 ExistPredSet.factory(rg.predTrue), // predicates
1951 tempDesc.toString() // description
1953 rg.id2hrn.put(as.getSummary(),hrnSummary);
1954 mapToExistingNode.put(typeDesc, hrnSummary);
1955 RefEdge edgeToSummary = new RefEdge(prevNode, // source
1958 arrayElementFieldName, // field name
1960 ExistPredSet.factory(rg.predTrue), // predicates
1963 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
1964 prevNode=hrnSummary;
1966 HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
// reuse the existing element-object summary; only add the edge if
// this exact reference does not already exist
1967 if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null){
1968 RefEdge edgeToSummary = new RefEdge(prevNode, // source
1971 arrayElementFieldName, // field name
1973 ExistPredSet.factory(rg.predTrue), // predicates
1976 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
1978 prevNode=hrnSummary;
1982 map.put(arrayEntryNode, prevNode);
1983 return arrayEntryNode;
// Build the initial reach graph for a task: for each task parameter,
// fabricate an allocation site and heap region for the parameter
// object, then transitively expand its tracked fields with a work set,
// creating one summary heap region per distinct type (reused via
// mapTypeToExistingSummaryNode) and edges mirroring the field
// structure.  Multi-dimensional array fields are delegated to
// createMultiDeimensionalArrayHRN.
1986 private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
1987 ReachGraph rg = new ReachGraph();
1988 TaskDescriptor taskDesc = fm.getTask();
1990 for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
1991 Descriptor paramDesc = taskDesc.getParameter(idx);
1992 TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);
1994 // setup data structure
// work items are single-entry maps: (source heap region -> field
// to expand from it)
1995 Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
1996 new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
1997 Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
1998 new Hashtable<TypeDescriptor, HeapRegionNode>();
1999 Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
2000 new Hashtable<HeapRegionNode, HeapRegionNode>();
// doneSet guards against re-expanding the same (node, field) pair
2001 Set<String> doneSet = new HashSet<String>();
2003 TempDescriptor tempDesc = fm.getParameter(idx);
2005 AllocSite as = createParameterAllocSite(rg, tempDesc, true);
2006 VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
2007 Integer idNewest = as.getIthOldest(0);
2008 HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);
2010 // make a new reference to allocated node
2011 RefEdge edgeNew = new RefEdge(lnX, // source
2013 taskDesc.getParamType(idx), // type
2015 hrnNewest.getAlpha(), // beta
2016 ExistPredSet.factory(rg.predTrue), // predicates
2019 rg.addRefEdge(lnX, hrnNewest, edgeNew);
2021 // set-up a work set for class field
2022 ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
2023 for (Iterator it = classDesc.getFields(); it.hasNext();) {
2024 FieldDescriptor fd = (FieldDescriptor) it.next();
2025 TypeDescriptor fieldType = fd.getType();
2026 if (shouldAnalysisTrack( fieldType )) {
2027 HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
2028 newMap.put(hrnNewest, fd);
2029 workSet.add(newMap);
2033 int uniqueIdentifier = 0;
// drain the work set, expanding one (node, field) pair per iteration
2034 while (!workSet.isEmpty()) {
2035 HashMap<HeapRegionNode, FieldDescriptor> map = workSet
2037 workSet.remove(map);
2039 Set<HeapRegionNode> key = map.keySet();
2040 HeapRegionNode srcHRN = key.iterator().next();
2041 FieldDescriptor fd = map.get(srcHRN);
2042 TypeDescriptor type = fd.getType();
2043 String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;
2045 if (!doneSet.contains(doneSetIdentifier)) {
2046 doneSet.add(doneSetIdentifier);
2047 if (!mapTypeToExistingSummaryNode.containsKey(type)) {
2048 // create new summary Node
2049 TempDescriptor td = new TempDescriptor("temp"
2050 + uniqueIdentifier, type);
2052 AllocSite allocSite;
2053 if(type.equals(paramTypeDesc)){
2054 //corresponding allocsite has already been created for a parameter variable.
2057 allocSite = createParameterAllocSite(rg, td, false);
2059 String strDesc = allocSite.toStringForDOT()
2061 TypeDescriptor allocType=allocSite.getType();
2063 HeapRegionNode hrnSummary;
// multi-dimensional arrays get a chain of summary nodes;
// everything else gets a single summary node
2064 if(allocType.isArray() && allocType.getArrayCount()>0){
2065 hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
2068 rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
2069 false, // single object?
2071 false, // out-of-context?
2072 allocSite.getType(), // type
2073 allocSite, // allocation site
2074 hrnNewest.getAlpha(), // inherent reach
2075 hrnNewest.getAlpha(), // current reach
2076 ExistPredSet.factory(rg.predTrue), // predicates
2077 strDesc // description
2079 rg.id2hrn.put(allocSite.getSummary(),hrnSummary);
2081 // make a new reference to summary node
2082 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2085 fd.getSymbol(), // field name
2086 hrnNewest.getAlpha(), // beta
2087 ExistPredSet.factory(rg.predTrue), // predicates
2091 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2095 mapTypeToExistingSummaryNode.put(type, hrnSummary);
2097 // set-up a work set for fields of the class
2098 Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
2099 for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
2101 FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator
2103 HeapRegionNode newDstHRN;
2104 if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)){
2105 //related heap region node is already exsited.
2106 newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
2108 newDstHRN=hrnSummary;
2110 doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
2111 if(!doneSet.contains(doneSetIdentifier)){
2112 // add new work item
2113 HashMap<HeapRegionNode, FieldDescriptor> newMap =
2114 new HashMap<HeapRegionNode, FieldDescriptor>();
2115 newMap.put(newDstHRN, fieldDescriptor);
2116 workSet.add(newMap);
2121 // if there exists corresponding summary node
2122 HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);
2124 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2126 fd.getType(), // type
2127 fd.getSymbol(), // field name
2128 srcHRN.getAlpha(), // beta
2129 ExistPredSet.factory(rg.predTrue), // predicates
2132 rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
2138 // debugSnapshot(rg, fm, true);
2142 // return all allocation sites in the method (there is one allocation
2143 // site per FlatNew node in a method)
// Lazily computed: builds the set on first request, then serves it
// from mapDescriptorToAllocSiteSet.
2144 private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
2145 if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
2146 buildAllocationSiteSet(d);
2149 return mapDescriptorToAllocSiteSet.get(d);
// Traverse the flat IR of the given method/task and collect the
// AllocSite of every FlatNew node into mapDescriptorToAllocSiteSet.
2153 private void buildAllocationSiteSet(Descriptor d) {
2154 HashSet<AllocSite> s = new HashSet<AllocSite>();
// a Descriptor here is either a method or a task; resolve its IR
2157 if( d instanceof MethodDescriptor ) {
2158 fm = state.getMethodFlat( (MethodDescriptor) d);
2160 assert d instanceof TaskDescriptor;
2161 fm = state.getMethodFlat( (TaskDescriptor) d);
2163 pm.analyzeMethod(fm);
2165 // visit every node in this FlatMethod's IR graph
2166 // and make a set of the allocation sites from the
2167 // FlatNew node's visited
2168 HashSet<FlatNode> visited = new HashSet<FlatNode>();
2169 HashSet<FlatNode> toVisit = new HashSet<FlatNode>();
2172 while( !toVisit.isEmpty() ) {
2173 FlatNode n = toVisit.iterator().next();
2175 if( n instanceof FlatNew ) {
2176 s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );
// enqueue unvisited successors
2182 for( int i = 0; i < pm.numNext(n); ++i ) {
2183 FlatNode child = pm.getNext(n, i);
2184 if( !visited.contains(child) ) {
2190 mapDescriptorToAllocSiteSet.put(d, s);
// Collect allocation sites carrying a disjoint-analysis ID in dIn and
// everything transitively callable from it (BFS over the call graph).
2193 private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {
2195 HashSet<AllocSite> out = new HashSet<AllocSite>();
2196 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2197 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2201 while (!toVisit.isEmpty()) {
2202 Descriptor d = toVisit.iterator().next();
// keep only sites explicitly tagged with a disjoint ID
2206 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2207 Iterator asItr = asSet.iterator();
2208 while (asItr.hasNext()) {
2209 AllocSite as = (AllocSite) asItr.next();
2210 if (as.getDisjointAnalysisId() != null) {
2215 // enqueue callees of this method to be searched for
2216 // allocation sites also
2217 Set callees = callGraph.getCalleeSet(d);
2218 if (callees != null) {
2219 Iterator methItr = callees.iterator();
2220 while (methItr.hasNext()) {
2221 MethodDescriptor md = (MethodDescriptor) methItr.next();
2223 if (!visited.contains(md)) {
// Collect allocation sites whose class carries flags, searching the
// task and every method transitively reachable from it (BFS over the
// call graph).  Backing implementation for the public
// getFlaggedAllocationSitesReachableFromTask interface.
2234 private HashSet<AllocSite>
2235 getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {
2237 HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
2238 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2239 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2243 // traverse this task and all methods reachable from this task
2244 while( !toVisit.isEmpty() ) {
2245 Descriptor d = toVisit.iterator().next();
2249 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2250 Iterator asItr = asSet.iterator();
2251 while( asItr.hasNext() ) {
2252 AllocSite as = (AllocSite) asItr.next();
2253 TypeDescriptor typed = as.getType();
2254 if( typed != null ) {
2255 ClassDescriptor cd = typed.getClassDesc();
// only classes declared with flags are of interest here
2256 if( cd != null && cd.hasFlags() ) {
2262 // enqueue callees of this method to be searched for
2263 // allocation sites also
2264 Set callees = callGraph.getCalleeSet(d);
2265 if( callees != null ) {
2266 Iterator methItr = callees.iterator();
2267 while( methItr.hasNext() ) {
2268 MethodDescriptor md = (MethodDescriptor) methItr.next();
2270 if( !visited.contains(md) ) {
// Expose the full set of method/task descriptors in the analysis scope.
2280 public Set<Descriptor> getDescriptorsToAnalyze() {
2281 return descriptorsToAnalyze;
2286 // get successive captures of the analysis state, use compiler
// flags to enable; these fields configure the debugSnapshot mechanism
2288 boolean takeDebugSnapshots = false;
// only snapshot while analyzing the descriptor with this symbol
2289 String descSymbolDebug = null;
// when true, terminate the compiler after the capture window closes
2290 boolean stopAfterCapture = false;
// counters/window bounds for which visits get captured
2291 int snapVisitCounter = 0;
2292 int snapNodeCounter = 0;
2293 int visitStartCapture = 0;
2294 int numVisitsToCapture = 0;
2297 void debugSnapshot( ReachGraph rg, FlatNode fn, boolean in ) {
2298 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
2306 if( snapVisitCounter >= visitStartCapture ) {
2307 System.out.println( " @@@ snapping visit="+snapVisitCounter+
2308 ", node="+snapNodeCounter+
2312 graphName = String.format( "snap%03d_%04din",
2316 graphName = String.format( "snap%03d_%04dout",
2321 graphName = graphName + fn;
2323 rg.writeGraph( graphName,
2324 true, // write labels (variables)
2325 true, // selectively hide intermediate temp vars
2326 true, // prune unreachable heap regions
2327 false, // hide reachability
2328 true, // hide subset reachability states
2329 true, // hide predicates
2330 false );// hide edge taints