1 package Analysis.Disjoint;
3 import Analysis.CallGraph.*;
4 import Analysis.Liveness;
5 import Analysis.ArrayReferencees;
6 import Analysis.OoOJava.RBlockRelationAnalysis;
7 import Analysis.OoOJava.RBlockStatusAnalysis;
10 import IR.Tree.Modifiers;
15 public class DisjointAnalysis {
17 ///////////////////////////////////////////
19 // Public interface to discover possible
20 // sharing in the program under analysis
22 ///////////////////////////////////////////
24 // if an object allocated at the target site may be
25 // reachable from both an object allocated at root1 and
26 // an object allocated at root2, return TRUE
27 public boolean mayBothReachTarget( FlatMethod fm,
32 AllocSite asr1 = getAllocationSiteFromFlatNew( fnRoot1 );
33 AllocSite asr2 = getAllocationSiteFromFlatNew( fnRoot2 );
34 assert asr1.isFlagged();
35 assert asr2.isFlagged();
37 AllocSite ast = getAllocationSiteFromFlatNew( fnTarget );
38 ReachGraph rg = getPartial( fm.getMethod() );
40 return rg.mayBothReachTarget( asr1, asr2, ast );
43 // similar to the method above, return TRUE if ever
44 // more than one object from the root allocation site
45 // may reach an object from the target site
46 public boolean mayManyReachTarget( FlatMethod fm,
50 AllocSite asr = getAllocationSiteFromFlatNew( fnRoot );
51 assert asr.isFlagged();
53 AllocSite ast = getAllocationSiteFromFlatNew( fnTarget );
54 ReachGraph rg = getPartial( fm.getMethod() );
56 return rg.mayManyReachTarget( asr, ast );
62 public HashSet<AllocSite>
63 getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
64 checkAnalysisComplete();
65 return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
68 public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
69 checkAnalysisComplete();
70 return getAllocSiteFromFlatNewPRIVATE(fn);
73 public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
74 checkAnalysisComplete();
75 return mapHrnIdToAllocSite.get(id);
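// The hasPotentialSharing overloads below answer the same question for
// different argument kinds: two task/method parameters, a parameter and
// an allocation site, or two allocation sites, all against the completed
// reach graph for the given task or method.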
78 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
81 checkAnalysisComplete();
82 ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
83 FlatMethod fm=state.getMethodFlat(taskOrMethod);
85 return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
88 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
89 int paramIndex, AllocSite alloc) {
90 checkAnalysisComplete();
91 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
92 FlatMethod fm=state.getMethodFlat(taskOrMethod);
94 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
97 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
98 AllocSite alloc, int paramIndex) {
99 checkAnalysisComplete();
100 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
101 FlatMethod fm=state.getMethodFlat(taskOrMethod);
103 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
106 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
107 AllocSite alloc1, AllocSite alloc2) {
108 checkAnalysisComplete();
109 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
111 return rg.mayReachSharedObjects(alloc1, alloc2);
114 public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
115 checkAnalysisComplete();
119 Iterator<HeapRegionNode> i = s.iterator();
120 while (i.hasNext()) {
121 HeapRegionNode n = i.next();
123 AllocSite as = n.getAllocSite();
125 out += " " + n.toString() + ",\n";
127 out += " " + n.toString() + ": " + as.toStringVerbose()
136 // use the methods given above to check every possible sharing class
137 // between task parameters and flagged allocation sites reachable
139 public void writeAllSharing(String outputFile,
142 boolean tabularOutput,
145 throws java.io.IOException {
146 checkAnalysisComplete();
148 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
150 if (!tabularOutput) {
151 bw.write("Conducting ownership analysis with allocation depth = "
152 + allocationDepth + "\n");
153 bw.write(timeReport + "\n");
158 // look through every task for potential sharing
159 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
160 while (taskItr.hasNext()) {
161 TaskDescriptor td = (TaskDescriptor) taskItr.next();
163 if (!tabularOutput) {
164 bw.write("\n---------" + td + "--------\n");
167 HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);
169 Set<HeapRegionNode> common;
171 // for each task parameter, check for sharing classes with
172 // other task parameters and every allocation site
173 // reachable from this task
174 boolean foundSomeSharing = false;
176 FlatMethod fm = state.getMethodFlat(td);
177 for (int i = 0; i < fm.numParameters(); ++i) {
179 // skip parameters with types that cannot reference
181 if( !shouldAnalysisTrack( fm.getParameter( i ).getType() ) ) {
185 // for the ith parameter check for sharing classes to all
186 // higher numbered parameters
187 for (int j = i + 1; j < fm.numParameters(); ++j) {
189 // skip parameters with types that cannot reference
191 if( !shouldAnalysisTrack( fm.getParameter( j ).getType() ) ) {
196 common = hasPotentialSharing(td, i, j);
197 if (!common.isEmpty()) {
198 foundSomeSharing = true;
200 if (!tabularOutput) {
201 bw.write("Potential sharing between parameters " + i
202 + " and " + j + ".\n");
203 bw.write(prettyPrintNodeSet(common) + "\n");
208 // for the ith parameter, check for sharing classes against
209 // the set of allocation sites reachable from this
211 Iterator allocItr = allocSites.iterator();
212 while (allocItr.hasNext()) {
213 AllocSite as = (AllocSite) allocItr.next();
214 common = hasPotentialSharing(td, i, as);
215 if (!common.isEmpty()) {
216 foundSomeSharing = true;
218 if (!tabularOutput) {
219 bw.write("Potential sharing between parameter " + i
220 + " and " + as.getFlatNew() + ".\n");
221 bw.write(prettyPrintNodeSet(common) + "\n");
227 // for each allocation site check for sharing classes with
228 // other allocation sites in the context of execution
230 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
231 Iterator allocItr1 = allocSites.iterator();
232 while (allocItr1.hasNext()) {
233 AllocSite as1 = (AllocSite) allocItr1.next();
235 Iterator allocItr2 = allocSites.iterator();
236 while (allocItr2.hasNext()) {
237 AllocSite as2 = (AllocSite) allocItr2.next();
239 if (!outerChecked.contains(as2)) {
240 common = hasPotentialSharing(td, as1, as2);
242 if (!common.isEmpty()) {
243 foundSomeSharing = true;
245 if (!tabularOutput) {
246 bw.write("Potential sharing between "
247 + as1.getFlatNew() + " and "
248 + as2.getFlatNew() + ".\n");
249 bw.write(prettyPrintNodeSet(common) + "\n");
255 outerChecked.add(as1);
258 if (!foundSomeSharing) {
259 if (!tabularOutput) {
260 bw.write("No sharing between flagged objects in Task " + td
268 bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
269 + " & " + numMethodsAnalyzed() + " \\\\\n");
271 bw.write("\nNumber sharing classes: "+numSharing);
277 // this version of writeAllSharing is for Java programs that have no tasks
278 public void writeAllSharingJava(String outputFile,
281 boolean tabularOutput,
284 throws java.io.IOException {
285 checkAnalysisComplete();
291 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
293 bw.write("Conducting disjoint reachability analysis with allocation depth = "
294 + allocationDepth + "\n");
295 bw.write(timeReport + "\n\n");
297 boolean foundSomeSharing = false;
299 Descriptor d = typeUtil.getMain();
300 HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);
302 // for each allocation site check for sharing classes with
303 // other allocation sites in the context of execution
305 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
306 Iterator allocItr1 = allocSites.iterator();
307 while (allocItr1.hasNext()) {
308 AllocSite as1 = (AllocSite) allocItr1.next();
310 Iterator allocItr2 = allocSites.iterator();
311 while (allocItr2.hasNext()) {
312 AllocSite as2 = (AllocSite) allocItr2.next();
314 if (!outerChecked.contains(as2)) {
315 Set<HeapRegionNode> common = hasPotentialSharing(d,
318 if (!common.isEmpty()) {
319 foundSomeSharing = true;
320 bw.write("Potential sharing between "
321 + as1.getDisjointAnalysisId() + " and "
322 + as2.getDisjointAnalysisId() + ".\n");
323 bw.write(prettyPrintNodeSet(common) + "\n");
329 outerChecked.add(as1);
332 if (!foundSomeSharing) {
333 bw.write("No sharing classes between flagged objects found.\n");
335 bw.write("\nNumber sharing classes: "+numSharing);
338 bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
343 ///////////////////////////////////////////
345 // end public interface
347 ///////////////////////////////////////////
351 protected void checkAnalysisComplete() {
352 if( !analysisComplete ) {
353 throw new Error("Warning: public interface method called while analysis is running.");
362 // run in faster mode, but only when the bugs are wrung out!
363 public static boolean releaseMode;
365 // use a command line option to set this; the analysis
366 // should then attempt to be deterministic
367 public static boolean determinismDesired;
369 // when we want to enforce determinism in the
370 // analysis we need to sort descriptors rather
371 // than toss them into efficient sets; use this comparator
372 public static DescriptorComparator dComp =
373 new DescriptorComparator();
376 // data from the compiler
378 public CallGraph callGraph;
379 public Liveness liveness;
380 public ArrayReferencees arrayReferencees;
381 public RBlockRelationAnalysis rblockRel;
382 public RBlockStatusAnalysis rblockStatus;
383 public TypeUtil typeUtil;
384 public int allocationDepth;
386 protected boolean doEffectsAnalysis = false;
387 protected EffectsAnalysis effectsAnalysis;
389 // data structure for public interface
390 private Hashtable< Descriptor, HashSet<AllocSite> >
391 mapDescriptorToAllocSiteSet;
394 // for public interface methods to warn that they
395 // are grabbing results during analysis
396 private boolean analysisComplete;
399 // used to identify HeapRegionNode objects
400 // A unique ID equates an object in one
401 // ownership graph with an object in another
402 // graph that logically represents the same
404 // start at 10 and increment to reserve some
405 // IDs for special purposes
406 static protected int uniqueIDcount = 10;
409 // An out-of-scope method created by the
410 // analysis that has no parameters,
411 // appears to allocate the command line
412 // arguments, and then invokes the source
413 // code's main method. The purpose of this is
414 // to provide the analysis with an explicit
415 // top-level context with no parameters
416 protected MethodDescriptor mdAnalysisEntry;
417 protected FlatMethod fmAnalysisEntry;
419 // main method defined by source program
420 protected MethodDescriptor mdSourceEntry;
422 // the set of task and/or method descriptors
423 // reachable in call graph
424 protected Set<Descriptor>
425 descriptorsToAnalyze;
427 // current descriptors to visit in fixed-point
428 // interprocedural analysis, prioritized by
429 // dependency in the call graph
430 protected Stack<Descriptor>
431 descriptorsToVisitStack;
432 protected PriorityQueue<DescriptorQWrapper>
435 // a duplication of the above structure, but
436 // for efficient testing of inclusion
437 protected HashSet<Descriptor>
438 descriptorsToVisitSet;
440 // storage for priorities (it doesn't make sense
441 // to add them to the Descriptor class, just in
443 protected Hashtable<Descriptor, Integer>
444 mapDescriptorToPriority;
446 // when analyzing a method and scheduling more:
447 // remember the set of callees enqueued for analysis
448 // so they can be put on top of the callers in
449 // the stack-visit mode
450 protected Set<Descriptor>
453 // maps a descriptor to its current partial result
454 // from the intraprocedural fixed-point analysis--
455 // when the interprocedural analysis settles, this
456 // mapping will have the final results for each
458 protected Hashtable<Descriptor, ReachGraph>
459 mapDescriptorToCompleteReachGraph;
461 // maps a descriptor to its known dependents: namely
462 // methods or tasks that call the descriptor's method
463 // AND are part of this analysis (reachable from main)
464 protected Hashtable< Descriptor, Set<Descriptor> >
465 mapDescriptorToSetDependents;
467 // if the analysis client wants to flag allocation sites
468 // programmatically, it should provide a set of FlatNew
469 // statements--this may be null if unneeded
470 protected Set<FlatNew> sitesToFlag;
472 // maps each flat new to one analysis abstraction
473 // allocation site object; these exist outside reach graphs
474 protected Hashtable<FlatNew, AllocSite>
475 mapFlatNewToAllocSite;
477 // maps intergraph heap region IDs to intergraph
478 // allocation sites that created them, a redundant
479 // structure for efficiency in some operations
480 protected Hashtable<Integer, AllocSite>
483 // maps a method to its initial heap model (IHM) that
484 // is the set of reachability graphs from every caller
485 // site, all merged together. The reason that we keep
486 // them separate is that any one call site's contribution
487 // to the IHM may change along the path to the fixed point
488 protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
489 mapDescriptorToIHMcontributions;
491 // additionally, keep a mapping from descriptors to the
492 // merged incoming initial context, because we want this
493 // initial context to be STRICTLY MONOTONIC
494 protected Hashtable<Descriptor, ReachGraph>
495 mapDescriptorToInitialContext;
497 // make the result for back edges analysis-wide STRICTLY
498 // MONOTONIC as well, but notice we use FlatNode as the
499 // key for this map: in case we want to consider other
500 // nodes as back edges in future implementations
501 protected Hashtable<FlatNode, ReachGraph>
502 mapBackEdgeToMonotone;
505 public static final String arrayElementFieldName = "___element_";
506 static protected Hashtable<TypeDescriptor, FieldDescriptor>
509 // for controlling DOT file output
510 protected boolean writeFinalDOTs;
511 protected boolean writeAllIncrementalDOTs;
513 // supporting DOT output--when we want to write every
514 // partial method result, keep a tally for generating
516 protected Hashtable<Descriptor, Integer>
517 mapDescriptorToNumUpdates;
519 // map task descriptor to its initial task-parameter reach graph
520 protected Hashtable<Descriptor, ReachGraph>
521 mapDescriptorToReachGraph;
523 protected PointerMethod pm;
525 static protected Hashtable<FlatNode, ReachGraph> fn2rg =
526 new Hashtable<FlatNode, ReachGraph>();
528 private Hashtable<FlatCall, Descriptor> fc2enclosing;
531 // allocate various structures that are not local
532 // to a single class method--should be done once
533 protected void allocateStructures() {
535 if( determinismDesired ) {
536 // use an ordered set
537 descriptorsToAnalyze = new TreeSet<Descriptor>( dComp );
539 // otherwise use a speedy hashset
540 descriptorsToAnalyze = new HashSet<Descriptor>();
543 mapDescriptorToCompleteReachGraph =
544 new Hashtable<Descriptor, ReachGraph>();
546 mapDescriptorToNumUpdates =
547 new Hashtable<Descriptor, Integer>();
549 mapDescriptorToSetDependents =
550 new Hashtable< Descriptor, Set<Descriptor> >();
552 mapFlatNewToAllocSite =
553 new Hashtable<FlatNew, AllocSite>();
555 mapDescriptorToIHMcontributions =
556 new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();
558 mapDescriptorToInitialContext =
559 new Hashtable<Descriptor, ReachGraph>();
561 mapBackEdgeToMonotone =
562 new Hashtable<FlatNode, ReachGraph>();
564 mapHrnIdToAllocSite =
565 new Hashtable<Integer, AllocSite>();
567 mapTypeToArrayField =
568 new Hashtable <TypeDescriptor, FieldDescriptor>();
570 if( state.DISJOINTDVISITSTACK ||
571 state.DISJOINTDVISITSTACKEESONTOP
573 descriptorsToVisitStack =
574 new Stack<Descriptor>();
577 if( state.DISJOINTDVISITPQUE ) {
578 descriptorsToVisitQ =
579 new PriorityQueue<DescriptorQWrapper>();
582 descriptorsToVisitSet =
583 new HashSet<Descriptor>();
585 mapDescriptorToPriority =
586 new Hashtable<Descriptor, Integer>();
589 new HashSet<Descriptor>();
591 mapDescriptorToAllocSiteSet =
592 new Hashtable<Descriptor, HashSet<AllocSite> >();
594 mapDescriptorToReachGraph =
595 new Hashtable<Descriptor, ReachGraph>();
597 pm = new PointerMethod();
599 fc2enclosing = new Hashtable<FlatCall, Descriptor>();
604 // this analysis generates a disjoint reachability
605 // graph for every reachable method in the program
606 public DisjointAnalysis( State s,
611 Set<FlatNew> sitesToFlag,
612 RBlockRelationAnalysis rra,
613 RBlockStatusAnalysis rsa
615 init( s, tu, cg, l, ar, sitesToFlag, rra, rsa );
618 protected void init( State state,
622 ArrayReferencees arrayReferencees,
623 Set<FlatNew> sitesToFlag,
624 RBlockRelationAnalysis rra,
625 RBlockStatusAnalysis rsa
628 analysisComplete = false;
631 this.typeUtil = typeUtil;
632 this.callGraph = callGraph;
633 this.liveness = liveness;
634 this.arrayReferencees = arrayReferencees;
635 this.sitesToFlag = sitesToFlag;
636 this.rblockRel = rra;
637 this.rblockStatus = rsa;
639 if( rblockRel != null ) {
640 doEffectsAnalysis = true;
641 effectsAnalysis = new EffectsAnalysis();
644 this.allocationDepth = state.DISJOINTALLOCDEPTH;
645 this.releaseMode = state.DISJOINTRELEASEMODE;
646 this.determinismDesired = state.DISJOINTDETERMINISM;
648 this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL;
649 this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL;
651 this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
652 this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
653 this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
654 this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
655 this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
656 this.snapVisitCounter = 1; // count visits from 1 (user will write 1, means 1st visit)
657 this.snapNodeCounter = 0; // count nodes from 0
660 state.DISJOINTDVISITSTACK ||
661 state.DISJOINTDVISITPQUE ||
662 state.DISJOINTDVISITSTACKEESONTOP;
663 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
664 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
665 assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);
667 // set some static configuration for ReachGraphs
668 ReachGraph.allocationDepth = allocationDepth;
669 ReachGraph.typeUtil = typeUtil;
671 ReachGraph.debugCallSiteVisitStartCapture
672 = state.DISJOINTDEBUGCALLVISITTOSTART;
674 ReachGraph.debugCallSiteNumVisitsToCapture
675 = state.DISJOINTDEBUGCALLNUMVISITS;
677 ReachGraph.debugCallSiteStopAfter
678 = state.DISJOINTDEBUGCALLSTOPAFTER;
680 ReachGraph.debugCallSiteVisitCounter
681 = 0; // count visits from 1, is incremented before first visit
685 allocateStructures();
687 double timeStartAnalysis = (double) System.nanoTime();
689 // start interprocedural fixed-point computation
692 } catch( IOException e ) {
693 throw new Error( "IO Exception while writing disjointness analysis output." );
696 analysisComplete=true;
698 double timeEndAnalysis = (double) System.nanoTime();
699 double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow( 10.0, 9.0 ) );
700 String treport = String.format( "The reachability analysis took %.3f sec.", dt );
701 String justtime = String.format( "%.2f", dt );
702 System.out.println( treport );
705 if( writeFinalDOTs && !writeAllIncrementalDOTs ) {
709 if( state.DISJOINTWRITEIHMS ) {
713 if( state.DISJOINTWRITEINITCONTEXTS ) {
714 writeInitialContexts();
717 if( state.DISJOINTALIASFILE != null ) {
719 writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
721 writeAllSharingJava(state.DISJOINTALIASFILE,
724 state.DISJOINTALIASTAB,
729 } catch( IOException e ) {
730 throw new Error( "IO Exception while writing disjointness analysis output." );
733 if( doEffectsAnalysis ) {
734 effectsAnalysis.writeEffects( "effects.txt" );
739 protected boolean moreDescriptorsToVisit() {
740 if( state.DISJOINTDVISITSTACK ||
741 state.DISJOINTDVISITSTACKEESONTOP
743 return !descriptorsToVisitStack.isEmpty();
745 } else if( state.DISJOINTDVISITPQUE ) {
746 return !descriptorsToVisitQ.isEmpty();
749 throw new Error( "Neither descriptor visiting mode set" );
753 // fixed-point computation over the call graph--when a
754 // method's callees are updated, it must be reanalyzed
755 protected void analyzeMethods() throws java.io.IOException {
757 // task or non-task (java) mode determines what the roots
758 // of the call chain are, and establishes the set of methods
759 // reachable from the roots that will be analyzed
762 System.out.println( "Bamboo mode..." );
764 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
765 while( taskItr.hasNext() ) {
766 TaskDescriptor td = (TaskDescriptor) taskItr.next();
767 if( !descriptorsToAnalyze.contains( td ) ) {
768 // add all methods transitively reachable from the
770 descriptorsToAnalyze.add( td );
771 descriptorsToAnalyze.addAll( callGraph.getAllMethods( td ) );
776 System.out.println( "Java mode..." );
778 // add all methods transitively reachable from the
779 // source's main to set for analysis
780 mdSourceEntry = typeUtil.getMain();
781 descriptorsToAnalyze.add( mdSourceEntry );
782 descriptorsToAnalyze.addAll( callGraph.getAllMethods( mdSourceEntry ) );
784 // fabricate an empty calling context that will call
785 // the source's main, but call graph doesn't know
786 // about it, so explicitly add it
787 makeAnalysisEntryMethod( mdSourceEntry );
788 descriptorsToAnalyze.add( mdAnalysisEntry );
792 // now, depending on the interprocedural mode for visiting
793 // methods, set up the needed data structures
795 if( state.DISJOINTDVISITPQUE ) {
797 // topologically sort according to the call graph so
798 // leaf calls are last, helps build contexts up first
799 LinkedList<Descriptor> sortedDescriptors =
800 topologicalSort( descriptorsToAnalyze );
802 // add sorted descriptors to priority queue, and duplicate
803 // the queue as a set for efficiently testing whether some
804 // method is marked for analysis
806 Iterator<Descriptor> dItr;
808 // for the priority queue, give items at the head
809 // of the sorted list a low number (highest priority)
810 while( !sortedDescriptors.isEmpty() ) {
811 Descriptor d = sortedDescriptors.removeFirst();
812 mapDescriptorToPriority.put( d, new Integer( p ) );
813 descriptorsToVisitQ.add( new DescriptorQWrapper( p, d ) );
814 descriptorsToVisitSet.add( d );
818 } else if( state.DISJOINTDVISITSTACK ||
819 state.DISJOINTDVISITSTACKEESONTOP
821 // if we're doing the stack scheme, just throw the root
822 // method or tasks on the stack
824 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
825 while( taskItr.hasNext() ) {
826 TaskDescriptor td = (TaskDescriptor) taskItr.next();
827 descriptorsToVisitStack.add( td );
828 descriptorsToVisitSet.add( td );
832 descriptorsToVisitStack.add( mdAnalysisEntry );
833 descriptorsToVisitSet.add( mdAnalysisEntry );
837 throw new Error( "Unknown method scheduling mode" );
841 // analyze scheduled methods until there are no more to visit
842 while( moreDescriptorsToVisit() ) {
845 if( state.DISJOINTDVISITSTACK ||
846 state.DISJOINTDVISITSTACKEESONTOP
848 d = descriptorsToVisitStack.pop();
850 } else if( state.DISJOINTDVISITPQUE ) {
851 d = descriptorsToVisitQ.poll().getDescriptor();
854 assert descriptorsToVisitSet.contains( d );
855 descriptorsToVisitSet.remove( d );
857 // because the task or method descriptor just extracted
858 // was in the "to visit" set it either hasn't been analyzed
859 // yet, or some method that it depends on has been
860 // updated. Recompute a complete reachability graph for
861 // this task/method and compare it to any previous result.
862 // If there is a change detected, add any methods/tasks
863 // that depend on this one to the "to visit" set.
865 System.out.println( "Analyzing " + d );
867 if( state.DISJOINTDVISITSTACKEESONTOP ) {
868 assert calleesToEnqueue.isEmpty();
871 ReachGraph rg = analyzeMethod( d );
872 ReachGraph rgPrev = getPartial( d );
874 if( !rg.equals( rgPrev ) ) {
877 if( state.DISJOINTDEBUGSCHEDULING ) {
878 System.out.println( " complete graph changed, scheduling callers for analysis:" );
881 // results for d changed, so enqueue dependents
882 // of d for further analysis
883 Iterator<Descriptor> depsItr = getDependents( d ).iterator();
884 while( depsItr.hasNext() ) {
885 Descriptor dNext = depsItr.next();
888 if( state.DISJOINTDEBUGSCHEDULING ) {
889 System.out.println( " "+dNext );
894 // whether or not the method under analysis changed,
895 // we may have some callees that are scheduled for
896 // more analysis, and they should go on the top of
897 // the stack now (in other method-visiting modes they
898 // are already enqueued at this point)
899 if( state.DISJOINTDVISITSTACKEESONTOP ) {
900 Iterator<Descriptor> depsItr = calleesToEnqueue.iterator();
901 while( depsItr.hasNext() ) {
902 Descriptor dNext = depsItr.next();
905 calleesToEnqueue.clear();
911 protected ReachGraph analyzeMethod( Descriptor d )
912 throws java.io.IOException {
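// Forward dataflow fixed point over this method's flat nodes: merge the
// graphs of a node's predecessors, apply the node's transfer function
// (analyzeFlatNode), and re-enqueue successors whenever the node's result
// graph changes; the return nodes' results are merged into the method's
// complete graph at the end.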
914 // get the flat code for this descriptor
916 if( d == mdAnalysisEntry ) {
917 fm = fmAnalysisEntry;
919 fm = state.getMethodFlat( d );
921 pm.analyzeMethod( fm );
923 // intraprocedural work set
924 Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
925 flatNodesToVisit.add( fm );
927 // if determinism is desired by client, shadow the
928 // set with a queue to make visit order deterministic
929 Queue<FlatNode> flatNodesToVisitQ = null;
930 if( determinismDesired ) {
931 flatNodesToVisitQ = new LinkedList<FlatNode>();
932 flatNodesToVisitQ.add( fm );
935 // mapping of current partial results
936 Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph =
937 new Hashtable<FlatNode, ReachGraph>();
939 // the set of return nodes' partial results that will be combined as
940 // the final, conservative approximation of the entire method
941 HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();
943 while( !flatNodesToVisit.isEmpty() ) {
946 if( determinismDesired ) {
947 assert !flatNodesToVisitQ.isEmpty();
948 fn = flatNodesToVisitQ.remove();
950 fn = flatNodesToVisit.iterator().next();
952 flatNodesToVisit.remove( fn );
954 // effect transfer function defined by this node,
955 // then compare it to the old graph at this node
956 // to see if anything was updated.
958 ReachGraph rg = new ReachGraph();
959 TaskDescriptor taskDesc;
960 if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null){
961 if(mapDescriptorToReachGraph.containsKey(taskDesc)){
962 // retrieve the existing reach graph if this is not the first time
963 rg=mapDescriptorToReachGraph.get(taskDesc);
965 // create initial reach graph for a task
966 rg=createInitialTaskReachGraph((FlatMethod)fn);
968 mapDescriptorToReachGraph.put(taskDesc, rg);
972 // start by merging all of this node's parents' graphs
973 for( int i = 0; i < pm.numPrev(fn); ++i ) {
974 FlatNode pn = pm.getPrev(fn,i);
975 if( mapFlatNodeToReachGraph.containsKey( pn ) ) {
976 ReachGraph rgParent = mapFlatNodeToReachGraph.get( pn );
977 rg.merge( rgParent );
982 if( takeDebugSnapshots &&
983 d.getSymbol().equals( descSymbolDebug )
985 debugSnapshot( rg, fn, true );
989 // modify rg with appropriate transfer function
990 rg = analyzeFlatNode( d, fm, fn, setReturns, rg );
993 if( takeDebugSnapshots &&
994 d.getSymbol().equals( descSymbolDebug )
996 debugSnapshot( rg, fn, false );
1001 // if the results of the new graph are different from
1002 // the current graph at this node, replace the graph
1003 // with the update and enqueue the children
1004 ReachGraph rgPrev = mapFlatNodeToReachGraph.get( fn );
1005 if( !rg.equals( rgPrev ) ) {
1006 mapFlatNodeToReachGraph.put( fn, rg );
1008 for( int i = 0; i < pm.numNext( fn ); i++ ) {
1009 FlatNode nn = pm.getNext( fn, i );
1011 flatNodesToVisit.add( nn );
1012 if( determinismDesired ) {
1013 flatNodesToVisitQ.add( nn );
1020 // end by merging all return nodes into a complete
1021 // reach graph that represents all possible heap
1022 // states after the flat method returns
1023 ReachGraph completeGraph = new ReachGraph();
1025 assert !setReturns.isEmpty();
1026 Iterator retItr = setReturns.iterator();
1027 while( retItr.hasNext() ) {
1028 FlatReturnNode frn = (FlatReturnNode) retItr.next();
1030 assert mapFlatNodeToReachGraph.containsKey( frn );
1031 ReachGraph rgRet = mapFlatNodeToReachGraph.get( frn );
1033 completeGraph.merge( rgRet );
1037 if( takeDebugSnapshots &&
1038 d.getSymbol().equals( descSymbolDebug )
1040 // record another visit to the debug snap
1041 // method, and reset the node counter
1042 System.out.println( " @@@ debug snap at visit "+snapVisitCounter );
1044 snapNodeCounter = 0;
1046 if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&
1049 System.out.println( "!!! Stopping analysis after debug snap captures. !!!" );
1055 return completeGraph;
1059 protected ReachGraph
1060 analyzeFlatNode( Descriptor d,
1061 FlatMethod fmContaining,
1063 HashSet<FlatReturnNode> setRetNodes,
1065 ) throws java.io.IOException {
1068 // any variables that are no longer live should be
1069 // nullified in the graph to reduce edges
1070 //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );
1074 FieldDescriptor fld;
1075 TypeDescriptor tdElement;
1076 FieldDescriptor fdElement;
1077 FlatSESEEnterNode sese;
1078 FlatSESEExitNode fsexn;
1080 // use node type to decide what transfer function
1081 // to apply to the reachability graph
1082 switch( fn.kind() ) {
1084 case FKind.FlatMethod: {
1085 // construct this method's initial heap model (IHM)
1086 // since we're working on the FlatMethod, we know
1087 // the incoming ReachGraph 'rg' is empty
1089 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1090 getIHMcontributions( d );
1092 Set entrySet = heapsFromCallers.entrySet();
1093 Iterator itr = entrySet.iterator();
1094 while( itr.hasNext() ) {
1095 Map.Entry me = (Map.Entry) itr.next();
1096 FlatCall fc = (FlatCall) me.getKey();
1097 ReachGraph rgContrib = (ReachGraph) me.getValue();
1099 assert fc.getMethod().equals( d );
1101 rg.merge( rgContrib );
1104 // additionally, we are enforcing STRICT MONOTONICITY for the
1105 // method's initial context, so grow the context by whatever
1106 // the previously computed context was, and put the most
1107 // up-to-date context back in the map
1108 ReachGraph rgPrevContext = mapDescriptorToInitialContext.get( d );
1109 rg.merge( rgPrevContext );
1110 mapDescriptorToInitialContext.put( d, rg );
1114 case FKind.FlatOpNode:
1115 FlatOpNode fon = (FlatOpNode) fn;
1116 if( fon.getOp().getOp() == Operation.ASSIGN ) {
1117 lhs = fon.getDest();
1118 rhs = fon.getLeft();
1120 // before transfer, do effects analysis support
1121 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1122 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1123 // x gets status of y
1124 if(!rg.isAccessible(rhs)){
1125 rg.makeInaccessible(lhs);
1131 rg.assignTempXEqualToTempY( lhs, rhs );
1135 case FKind.FlatCastNode:
1136 FlatCastNode fcn = (FlatCastNode) fn;
1140 TypeDescriptor td = fcn.getType();
1143 // before transfer, do effects analysis support
1144 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1145 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1146 // x gets status of y
1147 if(!rg.isAccessible(rhs)){
1148 rg.makeInaccessible(lhs);
1154 rg.assignTempXEqualToCastedTempY( lhs, rhs, td );
1157 case FKind.FlatFieldNode:
1158 FlatFieldNode ffn = (FlatFieldNode) fn;
1162 fld = ffn.getField();
1164 // before graph transform, possibly inject
1165 // a stall-site taint
1166 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1168 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1169 // x=y.f, stall y if not accessible
1170 // contributes read effects on stall site of y
1171 if(!rg.isAccessible(rhs)) {
1172 rg.taintStallSite(fn, rhs);
1175 // after this, x and y are accessible.
1176 rg.makeAccessible(lhs);
1177 rg.makeAccessible(rhs);
1181 if( shouldAnalysisTrack( fld.getType() ) ) {
1183 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fld );
1186 // after transfer, use updated graph to
1187 // do effects analysis
1188 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1189 effectsAnalysis.analyzeFlatFieldNode( rg, rhs, fld );
1193 case FKind.FlatSetFieldNode:
1194 FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;
1196 lhs = fsfn.getDst();
1197 fld = fsfn.getField();
1198 rhs = fsfn.getSrc();
1200 boolean strongUpdate = false;
1202 // before transfer func, possibly inject
1203 // stall-site taints
1204 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1206 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1207 // x.f=y, stall x and y if they are not accessible
1208 // also contribute write effects on stall site of x
1209 if(!rg.isAccessible(lhs)) {
1210 rg.taintStallSite(fn, lhs);
1213 if(!rg.isAccessible(rhs)) {
1214 rg.taintStallSite(fn, rhs);
1217 // accessible status update
1218 rg.makeAccessible(lhs);
1219 rg.makeAccessible(rhs);
1223 if( shouldAnalysisTrack( fld.getType() ) ) {
1225 strongUpdate = rg.assignTempXFieldFEqualToTempY( lhs, fld, rhs );
1228 // use transformed graph to do effects analysis
1229 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1230 effectsAnalysis.analyzeFlatSetFieldNode( rg, lhs, fld, strongUpdate );
1234 case FKind.FlatElementNode:
1235 FlatElementNode fen = (FlatElementNode) fn;
1240 assert rhs.getType() != null;
1241 assert rhs.getType().isArray();
1243 tdElement = rhs.getType().dereference();
1244 fdElement = getArrayField( tdElement );
1246 // before transfer func, possibly inject
1248 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1250 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1251 // x=y[i], stall y if not accessible;
1252 // contributes read effects on stall site of y
1253 // after this, x and y are accessible.
1254 if(!rg.isAccessible(rhs)) {
1255 rg.taintStallSite(fn, rhs);
1258 rg.makeAccessible(lhs);
1259 rg.makeAccessible(rhs);
1263 if( shouldAnalysisTrack( lhs.getType() ) ) {
1265 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fdElement );
1268 // use transformed graph to do effects analysis
1269 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1270 effectsAnalysis.analyzeFlatFieldNode( rg, rhs, fdElement );
1274 case FKind.FlatSetElementNode:
1275 FlatSetElementNode fsen = (FlatSetElementNode) fn;
1277 lhs = fsen.getDst();
1278 rhs = fsen.getSrc();
1280 assert lhs.getType() != null;
1281 assert lhs.getType().isArray();
1283 tdElement = lhs.getType().dereference();
1284 fdElement = getArrayField( tdElement );
1286 // before transfer func, possibly inject
1287 // stall-site taints
1288 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1290 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1291 // x[i]=y, stall x and y if they are not accessible
1292 // also contribute write effects on stall site of x
1293 if(!rg.isAccessible(lhs)) {
1294 rg.taintStallSite(fn, lhs);
1297 if(!rg.isAccessible(rhs)) {
1298 rg.taintStallSite(fn, rhs);
1301 // accessible status update
1302 rg.makeAccessible(lhs);
1303 rg.makeAccessible(rhs);
1307 if( shouldAnalysisTrack( rhs.getType() ) ) {
1308 // transfer func, BUT
1309 // skip this node if it cannot create new reachability paths
1310 if( !arrayReferencees.doesNotCreateNewReaching( fsen ) ) {
1311 rg.assignTempXFieldFEqualToTempY( lhs, fdElement, rhs );
1315 // use transformed graph to do effects analysis
1316 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1317 effectsAnalysis.analyzeFlatSetFieldNode( rg, lhs, fdElement,
1323 FlatNew fnn = (FlatNew) fn;
1325 if( shouldAnalysisTrack( lhs.getType() ) ) {
1326 AllocSite as = getAllocSiteFromFlatNewPRIVATE( fnn );
1328 // before transform, support effects analysis
1329 if (doEffectsAnalysis && fmContaining != fmAnalysisEntry) {
1330 if (rblockStatus.isInCriticalRegion(fmContaining, fn)) {
1331 // after creating new object, lhs is accessible
1332 rg.makeAccessible(lhs);
1337 rg.assignTempEqualToNewAlloc( lhs, as );
1341 case FKind.FlatSESEEnterNode:
1342 sese = (FlatSESEEnterNode) fn;
1344 if( sese.getIsCallerSESEplaceholder() ) {
1345 // ignore these dummy rblocks!
1349 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1351 // always remove ALL stall site taints at enter
1352 rg.removeAllStallSiteTaints();
1354 // inject taints for in-set vars
1355 rg.taintInSetVars( sese );
1359 case FKind.FlatSESEExitNode:
1360 fsexn = (FlatSESEExitNode) fn;
1361 sese = fsexn.getFlatEnter();
1363 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1365 // @ sese exit make all live variables
1366 // inaccessible to later parent statements
1367 rg.makeInaccessible( liveness.getLiveInTemps( fmContaining, fn ) );
1369 // always remove ALL stall site taints at exit
1370 rg.removeAllStallSiteTaints();
1372 // remove in-set var taints for the exiting rblock
1373 rg.removeInContextTaints( sese );
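// Call sites are the most involved transfer function: record this caller's
// heap contribution to the callee's initial heap model (IHM); if that
// contribution changed, reschedule the callee; then apply every possible
// callee's current summary graph to a copy of the caller graph and merge
// the copies back into rg (callees with no result yet are just scheduled).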
1378 case FKind.FlatCall: {
1379 Descriptor mdCaller;
1380 if( fmContaining.getMethod() != null ){
1381 mdCaller = fmContaining.getMethod();
1383 mdCaller = fmContaining.getTask();
1385 FlatCall fc = (FlatCall) fn;
1386 MethodDescriptor mdCallee = fc.getMethod();
1387 FlatMethod fmCallee = state.getMethodFlat( mdCallee );
1390 boolean debugCallSite =
1391 mdCaller.getSymbol().equals( state.DISJOINTDEBUGCALLER ) &&
1392 mdCallee.getSymbol().equals( state.DISJOINTDEBUGCALLEE );
1394 boolean writeDebugDOTs = false;
1395 boolean stopAfter = false;
1396 if( debugCallSite ) {
1397 ++ReachGraph.debugCallSiteVisitCounter;
1398 System.out.println( " $$$ Debug call site visit "+
1399 ReachGraph.debugCallSiteVisitCounter+
1403 (ReachGraph.debugCallSiteVisitCounter >=
1404 ReachGraph.debugCallSiteVisitStartCapture) &&
1406 (ReachGraph.debugCallSiteVisitCounter <
1407 ReachGraph.debugCallSiteVisitStartCapture +
1408 ReachGraph.debugCallSiteNumVisitsToCapture)
1410 writeDebugDOTs = true;
1411 System.out.println( " $$$ Capturing this call site visit $$$" );
1412 if( ReachGraph.debugCallSiteStopAfter &&
1413 (ReachGraph.debugCallSiteVisitCounter ==
1414 ReachGraph.debugCallSiteVisitStartCapture +
1415 ReachGraph.debugCallSiteNumVisitsToCapture - 1)
1423 // calculate the heap this call site can reach--note this is
1424 // not used for the current call site transform; we are
1425 // grabbing this heap model for future analysis of the callees,
1426 // so if different results emerge we will return to this site
1427 ReachGraph heapForThisCall_old =
1428 getIHMcontribution( mdCallee, fc );
1430 // the computation of the callee-reachable heap
1431 // is useful for making the callee starting point
1432 // and for applying the call site transfer function
1433 Set<Integer> callerNodeIDsCopiedToCallee =
1434 new HashSet<Integer>();
1436 ReachGraph heapForThisCall_cur =
1437 rg.makeCalleeView( fc,
1439 callerNodeIDsCopiedToCallee,
1443 if( !heapForThisCall_cur.equals( heapForThisCall_old ) ) {
1444 // if heap at call site changed, update the contribution,
1445 // and reschedule the callee for analysis
1446 addIHMcontribution( mdCallee, fc, heapForThisCall_cur );
1448 // map a FlatCall to its enclosing method/task descriptor
1449 // so we can write that info out later
1450 fc2enclosing.put( fc, mdCaller );
1452 if( state.DISJOINTDEBUGSCHEDULING ) {
1453 System.out.println( " context changed, scheduling callee: "+mdCallee );
1456 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1457 calleesToEnqueue.add( mdCallee );
1459 enqueue( mdCallee );
1464 // the transformation for a call site should update the
1465 // current heap abstraction with any effects from the callee,
1466 // or if the method is virtual, the effects from any possible
1467 // callees, so find the set of callees...
1468 Set<MethodDescriptor> setPossibleCallees;
1469 if( determinismDesired ) {
1470 // use an ordered set
1471 setPossibleCallees = new TreeSet<MethodDescriptor>( dComp );
1473 // otherwise use a speedy hashset
1474 setPossibleCallees = new HashSet<MethodDescriptor>();
1477 if( mdCallee.isStatic() ) {
1478 setPossibleCallees.add( mdCallee );
1480 TypeDescriptor typeDesc = fc.getThis().getType();
1481 setPossibleCallees.addAll( callGraph.getMethods( mdCallee,
1486 ReachGraph rgMergeOfPossibleCallers = new ReachGraph();
1488 Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
1489 while( mdItr.hasNext() ) {
1490 MethodDescriptor mdPossible = mdItr.next();
1491 FlatMethod fmPossible = state.getMethodFlat( mdPossible );
1493 addDependent( mdPossible, // callee
1496 // don't alter the working graph (rg) until we compute a
1497 // result for every possible callee, merge them all together,
1498 // then set rg to that
1499 ReachGraph rgPossibleCaller = new ReachGraph();
1500 rgPossibleCaller.merge( rg );
1502 ReachGraph rgPossibleCallee = getPartial( mdPossible );
1504 if( rgPossibleCallee == null ) {
1505 // if this method has never been analyzed just schedule it
1506 // for analysis and skip over this call site for now
1507 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1508 calleesToEnqueue.add( mdPossible );
1510 enqueue( mdPossible );
1513 if( state.DISJOINTDEBUGSCHEDULING ) {
1514 System.out.println( " callee hasn't been analyzed, scheduling: "+mdPossible );
1519 // calculate the method call transform
1520 rgPossibleCaller.resolveMethodCall( fc,
1523 callerNodeIDsCopiedToCallee,
1527 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1528 if( !rgPossibleCallee.isAccessible( ReachGraph.tdReturn ) ) {
1529 rgPossibleCaller.makeInaccessible( fc.getReturnTemp() );
1535 rgMergeOfPossibleCallers.merge( rgPossibleCaller );
1540 System.out.println( "$$$ Exiting after requested captures of call site. $$$" );
1545 // now that we've taken care of building heap models for
1546 // callee analysis, finish this transformation
1547 rg = rgMergeOfPossibleCallers;
1551 case FKind.FlatReturnNode:
1552 FlatReturnNode frn = (FlatReturnNode) fn;
1553 rhs = frn.getReturnTemp();
1555 // before transfer, do effects analysis support
1556 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1557 if(!rg.isAccessible(rhs)){
1558 rg.makeInaccessible(ReachGraph.tdReturn);
1562 if( rhs != null && shouldAnalysisTrack( rhs.getType() ) ) {
1563 rg.assignReturnEqualToTemp( rhs );
1566 setRetNodes.add( frn );
1572 // dead variables were removed before the above transfer function
1573 // was applied, so eliminate heap regions and edges that are no
1574 // longer part of the abstractly-live heap graph, and sweep up
1575 // any reachability effects that are altered by the reduction
1576 //rg.abstractGarbageCollect();
1580 // back edges are strictly monotonic
1581 if( pm.isBackEdge( fn ) ) {
1582 ReachGraph rgPrevResult = mapBackEdgeToMonotone.get( fn );
1583 rg.merge( rgPrevResult );
1584 mapBackEdgeToMonotone.put( fn, rg );
1587 // at this point rg should be the correct update
1588 // by an above transfer function, or untouched if
1589 // the flat node type doesn't affect the heap
1595 // this method should generate integers strictly greater than zero!
1596 // special "shadow" regions are made from a heap region by negating
1598 static public Integer generateUniqueHeapRegionNodeID() {
1600 return new Integer( uniqueIDcount );
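// lazily create one pseudo-field descriptor per array element type; all
// element references in the analysis are modeled as accesses of this field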
1605 static public FieldDescriptor getArrayField( TypeDescriptor tdElement ) {
1606 FieldDescriptor fdElement = mapTypeToArrayField.get( tdElement );
1607 if( fdElement == null ) {
1608 fdElement = new FieldDescriptor( new Modifiers( Modifiers.PUBLIC ),
1610 arrayElementFieldName,
1613 mapTypeToArrayField.put( tdElement, fdElement );
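// write a DOT graph of the complete (final) reach graph for every
// analyzed descriptor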
1620 private void writeFinalGraphs() {
1621 Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
1622 Iterator itr = entrySet.iterator();
1623 while( itr.hasNext() ) {
1624 Map.Entry me = (Map.Entry) itr.next();
1625 Descriptor d = (Descriptor) me.getKey();
1626 ReachGraph rg = (ReachGraph) me.getValue();
1628 rg.writeGraph( "COMPLETE"+d,
1629 true, // write labels (variables)
1630 true, // selectively hide intermediate temp vars
1631 true, // prune unreachable heap regions
1632 false, // hide reachability altogether
1633 true, // hide subset reachability states
1634 true, // hide predicates
1635 false ); // hide edge taints
1639 private void writeFinalIHMs() {
1640 Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
1641 while( d2IHMsItr.hasNext() ) {
1642 Map.Entry me1 = (Map.Entry) d2IHMsItr.next();
1643 Descriptor d = (Descriptor) me1.getKey();
1644 Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>) me1.getValue();
1646 Iterator fc2rgItr = IHMs.entrySet().iterator();
1647 while( fc2rgItr.hasNext() ) {
1648 Map.Entry me2 = (Map.Entry) fc2rgItr.next();
1649 FlatCall fc = (FlatCall) me2.getKey();
1650 ReachGraph rg = (ReachGraph) me2.getValue();
1652 rg.writeGraph( "IHMPARTFOR"+d+"FROM"+fc2enclosing.get( fc )+fc,
1653 true, // write labels (variables)
1654 true, // selectively hide intermediate temp vars
1655 true, // hide reachability altogether
1656 true, // prune unreachable heap regions
1657 true, // hide subset reachability states
1658 false, // hide predicates
1659 true ); // hide edge taints
1664 private void writeInitialContexts() {
1665 Set entrySet = mapDescriptorToInitialContext.entrySet();
1666 Iterator itr = entrySet.iterator();
1667 while( itr.hasNext() ) {
1668 Map.Entry me = (Map.Entry) itr.next();
1669 Descriptor d = (Descriptor) me.getKey();
1670 ReachGraph rg = (ReachGraph) me.getValue();
1672 rg.writeGraph( "INITIAL"+d,
1673 true, // write labels (variables)
1674 true, // selectively hide intermediate temp vars
1675 true, // prune unreachable heap regions
1676 false, // hide all reachability
1677 true, // hide subset reachability states
1678 true, // hide predicates
1679 false );// hide edge taints
1684 protected ReachGraph getPartial( Descriptor d ) {
1685 return mapDescriptorToCompleteReachGraph.get( d );
1688 protected void setPartial( Descriptor d, ReachGraph rg ) {
1689 mapDescriptorToCompleteReachGraph.put( d, rg );
1691 // when the flag for writing out every partial
1692 // result is set, we should spit out the graph,
1693 // but in order to give it a unique name we need
1694 // to track how many partial results for this
1695 // descriptor we've already written out
1696 if( writeAllIncrementalDOTs ) {
1697 if( !mapDescriptorToNumUpdates.containsKey( d ) ) {
1698 mapDescriptorToNumUpdates.put( d, new Integer( 0 ) );
1700 Integer n = mapDescriptorToNumUpdates.get( d );
1702 rg.writeGraph( d+"COMPLETE"+String.format( "%05d", n ),
1703 true, // write labels (variables)
1704 true, // selectively hide intermediate temp vars
1705 true, // prune unreachable heap regions
1706 false, // hide all reachability
1707 true, // hide subset reachability states
1708 false, // hide predicates
1709 false); // hide edge taints
1711 mapDescriptorToNumUpdates.put( d, n + 1 );
1717 // return just the allocation site associated with one FlatNew node
1718 protected AllocSite getAllocSiteFromFlatNewPRIVATE( FlatNew fnew ) {
1720 boolean flagProgrammatically = false;
1721 if( sitesToFlag != null && sitesToFlag.contains( fnew ) ) {
1722 flagProgrammatically = true;
1725 if( !mapFlatNewToAllocSite.containsKey( fnew ) ) {
1726 AllocSite as = AllocSite.factory( allocationDepth,
1728 fnew.getDisjointId(),
1729 flagProgrammatically
1732 // the newest nodes are single objects
1733 for( int i = 0; i < allocationDepth; ++i ) {
1734 Integer id = generateUniqueHeapRegionNodeID();
1735 as.setIthOldest( i, id );
1736 mapHrnIdToAllocSite.put( id, as );
1739 // the oldest node is a summary node
1740 as.setSummary( generateUniqueHeapRegionNodeID() );
1742 mapFlatNewToAllocSite.put( fnew, as );
1745 return mapFlatNewToAllocSite.get( fnew );
1749 public static boolean shouldAnalysisTrack( TypeDescriptor type ) {
1750 // don't track primitive types, but an array
1751 // of primitives is heap memory
1752 if( type.isImmutable() ) {
1753 return type.isArray();
1756 // everything else is an object
1760 protected int numMethodsAnalyzed() {
1761 return descriptorsToAnalyze.size();
1768 // Take in the source entry, which is the program's compiled entry, and
1769 // create a new analysis entry, a method that takes no parameters
1770 // and appears to allocate the command line arguments and call the
1771 // source entry with them. The purpose of this analysis entry is
1772 // to provide a top-level method context with no parameters left.
1773 protected void makeAnalysisEntryMethod( MethodDescriptor mdSourceEntry ) {
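// the fabricated flat body is, in effect:
//   args = new <type of main's parameter>;  main( args );  return;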
1775 Modifiers mods = new Modifiers();
1776 mods.addModifier( Modifiers.PUBLIC );
1777 mods.addModifier( Modifiers.STATIC );
1779 TypeDescriptor returnType =
1780 new TypeDescriptor( TypeDescriptor.VOID );
1782 this.mdAnalysisEntry =
1783 new MethodDescriptor( mods,
1785 "analysisEntryMethod"
1788 TempDescriptor cmdLineArgs =
1789 new TempDescriptor( "args",
1790 mdSourceEntry.getParamType( 0 )
1794 new FlatNew( mdSourceEntry.getParamType( 0 ),
1799 TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
1800 sourceEntryArgs[0] = cmdLineArgs;
1803 new FlatCall( mdSourceEntry,
1809 FlatReturnNode frn = new FlatReturnNode( null );
1811 FlatExit fe = new FlatExit();
1813 this.fmAnalysisEntry =
1814 new FlatMethod( mdAnalysisEntry,
1818 this.fmAnalysisEntry.addNext( fn );
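// DFS from each descriptor toward its callers; a descriptor is appended
// after its not-yet-visited callers, so entry points tend to come first
// and leaf callees last (call-graph cycles are broken by discovery order)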
1825 protected LinkedList<Descriptor> topologicalSort( Set<Descriptor> toSort ) {
1827 Set<Descriptor> discovered;
1829 if( determinismDesired ) {
1830 // use an ordered set
1831 discovered = new TreeSet<Descriptor>( dComp );
1833 // otherwise use a speedy hashset
1834 discovered = new HashSet<Descriptor>();
1837 LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();
1839 Iterator<Descriptor> itr = toSort.iterator();
1840 while( itr.hasNext() ) {
1841 Descriptor d = itr.next();
1843 if( !discovered.contains( d ) ) {
1844 dfsVisit( d, toSort, sorted, discovered );
1851 // While we're doing DFS on call graph, remember
1852 // dependencies for efficient queuing of methods
1853 // during interprocedural analysis:
1855 // a dependent of a method descriptor d for this analysis is:
1856 // 1) a method or task that invokes d
1857 // 2) in the descriptorsToAnalyze set
1858 protected void dfsVisit( Descriptor d,
1859 Set <Descriptor> toSort,
1860 LinkedList<Descriptor> sorted,
1861 Set <Descriptor> discovered ) {
1862 discovered.add( d );
1864 // only methods have callers, tasks never do
1865 if( d instanceof MethodDescriptor ) {
1867 MethodDescriptor md = (MethodDescriptor) d;
1869 // the call graph is not aware that we have a fabricated
1870 // analysis entry that calls the program source's entry
1871 if( md == mdSourceEntry ) {
1872 if( !discovered.contains( mdAnalysisEntry ) ) {
1873 addDependent( mdSourceEntry, // callee
1874 mdAnalysisEntry // caller
1876 dfsVisit( mdAnalysisEntry, toSort, sorted, discovered );
1880 // otherwise call graph guides DFS
1881 Iterator itr = callGraph.getCallerSet( md ).iterator();
1882 while( itr.hasNext() ) {
1883 Descriptor dCaller = (Descriptor) itr.next();
1885 // only consider callers in the original set to analyze
1886 if( !toSort.contains( dCaller ) ) {
1890 if( !discovered.contains( dCaller ) ) {
1891 addDependent( md, // callee
1895 dfsVisit( dCaller, toSort, sorted, discovered );
1900 // leaf-most nodes end up last in the sorted list
1901 sorted.addLast( d );
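// schedule d for (re)analysis in whichever visiting mode is active;
// descriptorsToVisitSet mirrors the stack/queue for cheap membership tests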
1905 protected void enqueue( Descriptor d ) {
1907 if( !descriptorsToVisitSet.contains( d ) ) {
1909 if( state.DISJOINTDVISITSTACK ||
1910 state.DISJOINTDVISITSTACKEESONTOP
1912 descriptorsToVisitStack.add( d );
1914 } else if( state.DISJOINTDVISITPQUE ) {
1915 Integer priority = mapDescriptorToPriority.get( d );
1916 descriptorsToVisitQ.add( new DescriptorQWrapper( priority,
1921 descriptorsToVisitSet.add( d );
1926 // a dependent of a method descriptor d for this analysis is:
1927 // 1) a method or task that invokes d
1928 // 2) in the descriptorsToAnalyze set
1929 protected void addDependent( Descriptor callee, Descriptor caller ) {
1930 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
1931 if( deps == null ) {
1932 deps = new HashSet<Descriptor>();
1935 mapDescriptorToSetDependents.put( callee, deps );
1938 protected Set<Descriptor> getDependents( Descriptor callee ) {
1939 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
1940 if( deps == null ) {
1941 deps = new HashSet<Descriptor>();
1942 mapDescriptorToSetDependents.put( callee, deps );
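// lazily create (and cache) the table mapping each call site in d's
// callers to the heap that call site contributes to d's initial heap model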
1948 public Hashtable<FlatCall, ReachGraph> getIHMcontributions( Descriptor d ) {
1950 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1951 mapDescriptorToIHMcontributions.get( d );
1953 if( heapsFromCallers == null ) {
1954 heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
1955 mapDescriptorToIHMcontributions.put( d, heapsFromCallers );
1958 return heapsFromCallers;
1961 public ReachGraph getIHMcontribution( Descriptor d,
1964 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1965 getIHMcontributions( d );
1967 if( !heapsFromCallers.containsKey( fc ) ) {
1971 return heapsFromCallers.get( fc );
1975 public void addIHMcontribution( Descriptor d,
1979 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1980 getIHMcontributions( d );
1982 heapsFromCallers.put( fc, rg );
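// fabricate a FlatNew and matching allocation site so a parameter can be
// modeled as an object allocated at method entry; the site's heap region
// IDs are registered in mapHrnIdToAllocSite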
1986 private AllocSite createParameterAllocSite( ReachGraph rg,
1987 TempDescriptor tempDesc,
1993 flatNew = new FlatNew( tempDesc.getType(), // type
1994 tempDesc, // param temp
1995 false, // global alloc?
1996 "param"+tempDesc // disjoint site ID string
1999 flatNew = new FlatNew( tempDesc.getType(), // type
2000 tempDesc, // param temp
2001 false, // global alloc?
2002 null // disjoint site ID string
2006 // create allocation site
2007 AllocSite as = AllocSite.factory( allocationDepth,
2009 flatNew.getDisjointId(),
2012 for (int i = 0; i < allocationDepth; ++i) {
2013 Integer id = generateUniqueHeapRegionNodeID();
2014 as.setIthOldest(i, id);
2015 mapHrnIdToAllocSite.put(id, as);
2017 // the oldest node is a summary node
2018 as.setSummary( generateUniqueHeapRegionNodeID() );
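// collect the fields of a class type whose types the analysis tracks
// (reference types and arrays of primitives)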
2026 private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc){
2028 Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
2029 if(!typeDesc.isImmutable()){
2030 ClassDescriptor classDesc = typeDesc.getClassDesc();
2031 for (Iterator it = classDesc.getFields(); it.hasNext();) {
2032 FieldDescriptor field = (FieldDescriptor) it.next();
2033 TypeDescriptor fieldType = field.getType();
2034 if (shouldAnalysisTrack( fieldType )) {
2035 fieldSet.add(field);
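// roughly: model a multi-dimensional array field as a chain of summary
// heap regions, one per dimension, reusing an existing summary node for
// each element type where possible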
2043 private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha ){
2045 int dimCount=fd.getType().getArrayCount();
2046 HeapRegionNode prevNode=null;
2047 HeapRegionNode arrayEntryNode=null;
2048 for(int i=dimCount;i>0;i--){
2049 TypeDescriptor typeDesc=fd.getType().dereference(); // hack to get an instance of the type desc
2050 typeDesc.setArrayCount(i);
2051 TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
2052 HeapRegionNode hrnSummary ;
2053 if(!mapToExistingNode.containsKey(typeDesc)){
2058 as = createParameterAllocSite(rg, tempDesc, false);
2060 // make a new reference to allocated node
2062 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2063 false, // single object?
2065 false, // out-of-context?
2066 as.getType(), // type
2067 as, // allocation site
2068 alpha, // inherent reach
2069 alpha, // current reach
2070 ExistPredSet.factory(rg.predTrue), // predicates
2071 tempDesc.toString() // description
2073 rg.id2hrn.put(as.getSummary(),hrnSummary);
2075 mapToExistingNode.put(typeDesc, hrnSummary);
2077 hrnSummary=mapToExistingNode.get(typeDesc);
2081 // make a new reference between new summary node and source
2082 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2085 fd.getSymbol(), // field name
2087 ExistPredSet.factory(rg.predTrue), // predicates
2091 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2092 prevNode=hrnSummary;
2093 arrayEntryNode=hrnSummary;
2095 // make a new reference between summary nodes of array
2096 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2099 arrayElementFieldName, // field name
2101 ExistPredSet.factory(rg.predTrue), // predicates
2105 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2106 prevNode=hrnSummary;
2111 // create a new obj node if obj has at least one non-primitive field
2112 TypeDescriptor type=fd.getType();
2113 if(getFieldSetTobeAnalyzed(type).size()>0){
2114 TypeDescriptor typeDesc=type.dereference();
2115 typeDesc.setArrayCount(0);
2116 if(!mapToExistingNode.containsKey(typeDesc)){
2117 TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
2118 AllocSite as = createParameterAllocSite(rg, tempDesc, false);
2119        // create the summary node modeling the array's element objects
2120 HeapRegionNode hrnSummary =
2121 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2122 false, // single object?
2124 false, // out-of-context?
2126 as, // allocation site
2127 alpha, // inherent reach
2128 alpha, // current reach
2129 ExistPredSet.factory(rg.predTrue), // predicates
2130 tempDesc.toString() // description
2132 rg.id2hrn.put(as.getSummary(),hrnSummary);
2133 mapToExistingNode.put(typeDesc, hrnSummary);
2134 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2137 arrayElementFieldName, // field name
2139 ExistPredSet.factory(rg.predTrue), // predicates
2142 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2143 prevNode=hrnSummary;
2145 HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
2146 if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null){
2147 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2150 arrayElementFieldName, // field name
2152 ExistPredSet.factory(rg.predTrue), // predicates
2155 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2157 prevNode=hrnSummary;
2161 map.put(arrayEntryNode, prevNode);
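    // Hedged note on this helper's contract: arrayEntryNode (returned just
    // below) is the summary for the outermost array dimension, i.e. the
    // node srcHRN's field edge points at, while map records
    // arrayEntryNode -> prevNode, the node modeling the innermost element
    // objects.  A caller would use it roughly like:
    //
    //   HeapRegionNode entry    = createMultiDeimensionalArrayHRN( ... );
    //   HeapRegionNode elements = map.get( entry ); // expand its fields next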
2162 return arrayEntryNode;
2165 private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
2166 ReachGraph rg = new ReachGraph();
2167 TaskDescriptor taskDesc = fm.getTask();
2169 for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
2170 Descriptor paramDesc = taskDesc.getParameter(idx);
2171 TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);
2173      // set up data structures
2174 Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
2175 new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
2176 Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
2177 new Hashtable<TypeDescriptor, HeapRegionNode>();
2178 Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
2179 new Hashtable<HeapRegionNode, HeapRegionNode>();
2180 Set<String> doneSet = new HashSet<String>();
2182 TempDescriptor tempDesc = fm.getParameter(idx);
2184 AllocSite as = createParameterAllocSite(rg, tempDesc, true);
2185 VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
2186 Integer idNewest = as.getIthOldest(0);
2187 HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);
2189 // make a new reference to allocated node
2190 RefEdge edgeNew = new RefEdge(lnX, // source
2192 taskDesc.getParamType(idx), // type
2194 hrnNewest.getAlpha(), // beta
2195 ExistPredSet.factory(rg.predTrue), // predicates
2198 rg.addRefEdge(lnX, hrnNewest, edgeNew);
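      // At this point the graph for parameter idx looks like (hedged picture):
      //
      //   lnX (variable node for the param temp) --edgeNew--> hrnNewest
      //
      // where hrnNewest is the 0th-oldest region of the parameter's flagged
      // alloc site; the worklist below then hangs per-type summary nodes
      // off hrnNewest for every tracked field.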
2200      // set up a work set for the parameter class's fields
2201 ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
2202 for (Iterator it = classDesc.getFields(); it.hasNext();) {
2203 FieldDescriptor fd = (FieldDescriptor) it.next();
2204 TypeDescriptor fieldType = fd.getType();
2205 if (shouldAnalysisTrack( fieldType )) {
2206 HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
2207 newMap.put(hrnNewest, fd);
2208 workSet.add(newMap);
2212 int uniqueIdentifier = 0;
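      // Hedged summary of the worklist loop below: each work item is a
      // single-entry map (srcHRN -> field).  Processing an item either
      // reuses the per-type summary node in mapTypeToExistingSummaryNode
      // or creates one (multi-dimensional arrays go through the helper
      // above), adds a reference edge from srcHRN via the field, and
      // enqueues the new node's trackable fields.  The doneSet of
      // "<nodeID>_<field>" strings guarantees termination.
      //
      //   while workSet not empty:
      //     (src, fd) = remove any item;  skip if "<srcID>_<fd>" is done
      //     if no summary node exists yet for fd's type:
      //       dst = new per-type summary node
      //       add edge  src --fd--> dst
      //       enqueue (dst, f) for every trackable field f of dst's type
      //     else:
      //       add edge  src --fd--> existing summary node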
2213 while (!workSet.isEmpty()) {
2214 HashMap<HeapRegionNode, FieldDescriptor> map = workSet
2216 workSet.remove(map);
2218 Set<HeapRegionNode> key = map.keySet();
2219 HeapRegionNode srcHRN = key.iterator().next();
2220 FieldDescriptor fd = map.get(srcHRN);
2221 TypeDescriptor type = fd.getType();
2222 String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;
2224 if (!doneSet.contains(doneSetIdentifier)) {
2225 doneSet.add(doneSetIdentifier);
2226 if (!mapTypeToExistingSummaryNode.containsKey(type)) {
2227          // create a new summary node
2228 TempDescriptor td = new TempDescriptor("temp"
2229 + uniqueIdentifier, type);
2231 AllocSite allocSite;
2232 if(type.equals(paramTypeDesc)){
2233            // the corresponding alloc site was already created for the parameter variable.
2236 allocSite = createParameterAllocSite(rg, td, false);
2238 String strDesc = allocSite.toStringForDOT()
2240 TypeDescriptor allocType=allocSite.getType();
2242 HeapRegionNode hrnSummary;
2243 if(allocType.isArray() && allocType.getArrayCount()>0){
2244 hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
2247 rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
2248 false, // single object?
2250 false, // out-of-context?
2251 allocSite.getType(), // type
2252 allocSite, // allocation site
2253 hrnNewest.getAlpha(), // inherent reach
2254 hrnNewest.getAlpha(), // current reach
2255 ExistPredSet.factory(rg.predTrue), // predicates
2256 strDesc // description
2258 rg.id2hrn.put(allocSite.getSummary(),hrnSummary);
2260 // make a new reference to summary node
2261 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2264 fd.getSymbol(), // field name
2265 hrnNewest.getAlpha(), // beta
2266 ExistPredSet.factory(rg.predTrue), // predicates
2270 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2274 mapTypeToExistingSummaryNode.put(type, hrnSummary);
2276          // enqueue work items for the fields of this class
2277 Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
2278 for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
2280 FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator
2282 HeapRegionNode newDstHRN;
2283 if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)){
2284            // the related heap region node already exists.
2285 newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
2287 newDstHRN=hrnSummary;
2289 doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
2290 if(!doneSet.contains(doneSetIdentifier)){
2291 // add new work item
2292 HashMap<HeapRegionNode, FieldDescriptor> newMap =
2293 new HashMap<HeapRegionNode, FieldDescriptor>();
2294 newMap.put(newDstHRN, fieldDescriptor);
2295 workSet.add(newMap);
2300          // a corresponding summary node already exists; just add an edge to it
2301 HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);
2303 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2305 fd.getType(), // type
2306 fd.getSymbol(), // field name
2307 srcHRN.getAlpha(), // beta
2308 ExistPredSet.factory(rg.predTrue), // predicates
2311 rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
2317 // debugSnapshot(rg, fm, true);
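    // Hedged picture of the graph this method builds per task parameter:
    //
    //   paramTemp --> [param region] --f1--> [summary of type T1] --g--> ...
    //                                --f2--> [summary of type T2]
    //
    // i.e. exactly one summary region per distinct tracked type, shared by
    // every field of that type (arrays add a short per-dimension chain),
    // which keeps the initial task reach graph finite.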
2321 // return all allocation sites in the method (there is one allocation
2322 // site per FlatNew node in a method)
2323 private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
2324 if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
2325 buildAllocationSiteSet(d);
2328 return mapDescriptorToAllocSiteSet.get(d);
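    // Hedged usage sketch for the accessor above: the set is built lazily
    // and cached per descriptor, so later passes can ask cheaply, e.g.
    //
    //   for( AllocSite as : getAllocationSiteSet( someDescriptor ) ) {
    //     ... // one AllocSite per FlatNew in the method
    //   }
    //
    // where someDescriptor is a placeholder for any analyzed method or
    // task descriptor.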
2332 private void buildAllocationSiteSet(Descriptor d) {
2333 HashSet<AllocSite> s = new HashSet<AllocSite>();
2336 if( d instanceof MethodDescriptor ) {
2337 fm = state.getMethodFlat( (MethodDescriptor) d);
2339 assert d instanceof TaskDescriptor;
2340 fm = state.getMethodFlat( (TaskDescriptor) d);
2342 pm.analyzeMethod(fm);
2344 // visit every node in this FlatMethod's IR graph
2345 // and make a set of the allocation sites from the
2346    // FlatNew nodes visited
2347 HashSet<FlatNode> visited = new HashSet<FlatNode>();
2348 HashSet<FlatNode> toVisit = new HashSet<FlatNode>();
2351 while( !toVisit.isEmpty() ) {
2352 FlatNode n = toVisit.iterator().next();
2354 if( n instanceof FlatNew ) {
2355 s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );
2361 for( int i = 0; i < pm.numNext(n); ++i ) {
2362 FlatNode child = pm.getNext(n, i);
2363 if( !visited.contains(child) ) {
2369 mapDescriptorToAllocSiteSet.put(d, s);
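    // Hedged recap of the traversal above: a standard worklist flood over
    // the flat IR using pm.getNext(), collecting one AllocSite per FlatNew
    // encountered; the 'visited' set prevents revisiting nodes, so the
    // loop terminates even when the method body contains control-flow
    // cycles.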
2372 private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {
2374 HashSet<AllocSite> out = new HashSet<AllocSite>();
2375 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2376 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2380 while (!toVisit.isEmpty()) {
2381 Descriptor d = toVisit.iterator().next();
2385 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2386 Iterator asItr = asSet.iterator();
2387 while (asItr.hasNext()) {
2388 AllocSite as = (AllocSite) asItr.next();
2389 if (as.getDisjointAnalysisId() != null) {
2394 // enqueue callees of this method to be searched for
2395 // allocation sites also
2396 Set callees = callGraph.getCalleeSet(d);
2397 if (callees != null) {
2398 Iterator methItr = callees.iterator();
2399 while (methItr.hasNext()) {
2400 MethodDescriptor md = (MethodDescriptor) methItr.next();
2402 if (!visited.contains(md)) {
2413 private HashSet<AllocSite>
2414 getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {
2416 HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
2417 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2418 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2422 // traverse this task and all methods reachable from this task
2423 while( !toVisit.isEmpty() ) {
2424 Descriptor d = toVisit.iterator().next();
2428 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2429 Iterator asItr = asSet.iterator();
2430 while( asItr.hasNext() ) {
2431 AllocSite as = (AllocSite) asItr.next();
2432 TypeDescriptor typed = as.getType();
2433 if( typed != null ) {
2434 ClassDescriptor cd = typed.getClassDesc();
2435 if( cd != null && cd.hasFlags() ) {
2441 // enqueue callees of this method to be searched for
2442 // allocation sites also
2443 Set callees = callGraph.getCalleeSet(d);
2444 if( callees != null ) {
2445 Iterator methItr = callees.iterator();
2446 while( methItr.hasNext() ) {
2447 MethodDescriptor md = (MethodDescriptor) methItr.next();
2449 if( !visited.contains(md) ) {
2459 public Set<Descriptor> getDescriptorsToAnalyze() {
2460 return descriptorsToAnalyze;
2463 public EffectsAnalysis getEffectsAnalysis(){
2464 return effectsAnalysis;
2468  // get successive captures of the analysis state; the fields below control the capture window
2470 boolean takeDebugSnapshots = false;
2471 String descSymbolDebug = null;
2472 boolean stopAfterCapture = false;
2473 int snapVisitCounter = 0;
2474 int snapNodeCounter = 0;
2475 int visitStartCapture = 0;
2476 int numVisitsToCapture = 0;
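  // Hedged configuration sketch for the snapshot fields above (values are
  // illustrative only, and the role of descSymbolDebug is an assumption):
  //
  //   takeDebugSnapshots = true;
  //   descSymbolDebug    = "main";   // presumably the method symbol to watch
  //   visitStartCapture  = 10;       // first visit to record
  //   numVisitsToCapture = 5;        // how many visits to record
  //   stopAfterCapture   = true;     // halt once the window is written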
2479 void debugSnapshot( ReachGraph rg, FlatNode fn, boolean in ) {
2480 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
2488 if( snapVisitCounter >= visitStartCapture ) {
2489 System.out.println( " @@@ snapping visit="+snapVisitCounter+
2490 ", node="+snapNodeCounter+
2494 graphName = String.format( "snap%03d_%04din",
2498 graphName = String.format( "snap%03d_%04dout",
2503 graphName = graphName + fn;
2505 rg.writeGraph( graphName,
2506 true, // write labels (variables)
2507 true, // selectively hide intermediate temp vars
2508 true, // prune unreachable heap regions
2509 false, // hide reachability
2510 true, // hide subset reachability states
2511 true, // hide predicates
2512 false );// hide edge taints
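      // Hedged note: each captured visit therefore produces a pair of graph
      // dumps named roughly "snap<visit>_<node>in<FlatNode>" and
      // "...out<FlatNode>" (see the String.format calls above), presumably
      // as DOT output; the boolean arguments to writeGraph select how much
      // reachability detail the dump keeps.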