1 package Analysis.Disjoint;
3 import Analysis.CallGraph.*;
4 import Analysis.Liveness;
5 import Analysis.ArrayReferencees;
6 import Analysis.OoOJava.RBlockRelationAnalysis;
7 import Analysis.OoOJava.RBlockStatusAnalysis;
10 import IR.Tree.Modifiers;
15 public class DisjointAnalysis {
17 ///////////////////////////////////////////
19 // Public interface to discover possible
20 // aliases in the program under analysis
22 ///////////////////////////////////////////
// ---- Public query interface ----
// NOTE(review): this listing elides lines (closing braces, parts of
// signatures); comments only, code left byte-identical.
// Returns the flagged allocation sites reachable from the given task;
// delegates to the PRIVATE worker after checking the analysis finished.
24 public HashSet<AllocSite>
25 getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
26 checkAnalysisComplete();
27 return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
// Maps a FlatNew IR node to its analysis-level AllocSite abstraction.
30 public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
31 checkAnalysisComplete();
32 return getAllocSiteFromFlatNewPRIVATE(fn);
// Reverse lookup: heap region node ID -> AllocSite that created it.
35 public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
36 checkAnalysisComplete();
37 return mapHrnIdToAllocSite.get(id);
// Overload 1: sharing between two parameters of taskOrMethod
// (parameter-index arguments elided in this extract — orig lines 41-42).
40 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
43 checkAnalysisComplete();
44 ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
45 FlatMethod fm=state.getMethodFlat(taskOrMethod);
47 return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
// Overload 2: sharing between a parameter and an allocation site.
50 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
51 int paramIndex, AllocSite alloc) {
52 checkAnalysisComplete();
53 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
54 FlatMethod fm=state.getMethodFlat(taskOrMethod);
56 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Overload 3: same query with arguments flipped; forwards to the same
// ReachGraph call as overload 2 (the relation is symmetric here).
59 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
60 AllocSite alloc, int paramIndex) {
61 checkAnalysisComplete();
62 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
63 FlatMethod fm=state.getMethodFlat(taskOrMethod);
65 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Overload 4: sharing between two allocation sites; no FlatMethod needed.
68 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
69 AllocSite alloc1, AllocSite alloc2) {
70 checkAnalysisComplete();
71 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
73 return rg.mayReachSharedObjects(alloc1, alloc2);
// Renders a node set for human-readable reports; nodes with an AllocSite
// get the verbose site description. (Accumulator 'out' is declared on an
// elided line — orig ~78-80.)
76 public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
77 checkAnalysisComplete();
81 Iterator<HeapRegionNode> i = s.iterator();
83 HeapRegionNode n = i.next();
85 AllocSite as = n.getAllocSite();
// branch taken when the node has no AllocSite (guard elided in extract)
87 out += " " + n.toString() + ",\n";
89 out += " " + n.toString() + ": " + as.toStringVerbose()
98 // use the methods given above to check every possible sharing class
99 // between task parameters and flagged allocation sites reachable
// Writes a sharing report for every task to outputFile. tabularOutput
// switches between prose output and a LaTeX-style table row (see the
// "\\\\" write near the end). Other parameters (timeReport, justTime,
// numLines) appear on signature lines elided from this extract.
// Throws IOException on any write failure; note the writer is not
// wrapped in try/finally in the visible code.
101 public void writeAllSharing(String outputFile,
104 boolean tabularOutput,
107 throws java.io.IOException {
108 checkAnalysisComplete();
110 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
112 if (!tabularOutput) {
113 bw.write("Conducting ownership analysis with allocation depth = "
114 + allocationDepth + "\n");
115 bw.write(timeReport + "\n");
120 // look through every task for potential sharing
121 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
122 while (taskItr.hasNext()) {
123 TaskDescriptor td = (TaskDescriptor) taskItr.next();
125 if (!tabularOutput) {
126 bw.write("\n---------" + td + "--------\n");
129 HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);
131 Set<HeapRegionNode> common;
133 // for each task parameter, check for sharing classes with
134 // other task parameters and every allocation site
135 // reachable from this task
136 boolean foundSomeSharing = false;
138 FlatMethod fm = state.getMethodFlat(td);
139 for (int i = 0; i < fm.numParameters(); ++i) {
141 // skip parameters with types that cannot reference
143 if( !shouldAnalysisTrack( fm.getParameter( i ).getType() ) ) {
147 // for the ith parameter check for sharing classes to all
148 // higher numbered parameters
149 for (int j = i + 1; j < fm.numParameters(); ++j) {
151 // skip parameters with types that cannot reference
153 if( !shouldAnalysisTrack( fm.getParameter( j ).getType() ) ) {
158 common = hasPotentialSharing(td, i, j);
159 if (!common.isEmpty()) {
160 foundSomeSharing = true;
// presumably a numSharing counter is bumped on elided lines — TODO confirm
162 if (!tabularOutput) {
163 bw.write("Potential sharing between parameters " + i
164 + " and " + j + ".\n");
165 bw.write(prettyPrintNodeSet(common) + "\n");
170 // for the ith parameter, check for sharing classes against
171 // the set of allocation sites reachable from this
173 Iterator allocItr = allocSites.iterator();
174 while (allocItr.hasNext()) {
175 AllocSite as = (AllocSite) allocItr.next();
176 common = hasPotentialSharing(td, i, as);
177 if (!common.isEmpty()) {
178 foundSomeSharing = true;
180 if (!tabularOutput) {
181 bw.write("Potential sharing between parameter " + i
182 + " and " + as.getFlatNew() + ".\n");
183 bw.write(prettyPrintNodeSet(common) + "\n");
189 // for each allocation site check for sharing classes with
190 // other allocation sites in the context of execution
// outerChecked makes each unordered site pair (as1,as2) tested once
192 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
193 Iterator allocItr1 = allocSites.iterator();
194 while (allocItr1.hasNext()) {
195 AllocSite as1 = (AllocSite) allocItr1.next();
197 Iterator allocItr2 = allocSites.iterator();
198 while (allocItr2.hasNext()) {
199 AllocSite as2 = (AllocSite) allocItr2.next();
201 if (!outerChecked.contains(as2)) {
202 common = hasPotentialSharing(td, as1, as2);
204 if (!common.isEmpty()) {
205 foundSomeSharing = true;
207 if (!tabularOutput) {
208 bw.write("Potential sharing between "
209 + as1.getFlatNew() + " and "
210 + as2.getFlatNew() + ".\n");
211 bw.write(prettyPrintNodeSet(common) + "\n");
217 outerChecked.add(as1);
220 if (!foundSomeSharing) {
221 if (!tabularOutput) {
222 bw.write("No sharing between flagged objects in Task " + td
// tabular branch: emit one LaTeX table row for this run
230 bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
231 + " & " + numMethodsAnalyzed() + " \\\\\n");
233 bw.write("\nNumber sharing classes: "+numSharing);
239 // this version of writeAllSharing is for Java programs that have no tasks
// Reports only site-vs-site sharing, rooted at the program's main method
// (there are no task parameters to compare in Java mode). Same elided-line
// caveats as writeAllSharing; comments only below.
240 public void writeAllSharingJava(String outputFile,
243 boolean tabularOutput,
246 throws java.io.IOException {
247 checkAnalysisComplete();
253 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
255 bw.write("Conducting disjoint reachability analysis with allocation depth = "
256 + allocationDepth + "\n");
257 bw.write(timeReport + "\n\n");
259 boolean foundSomeSharing = false;
// root context is the source program's main method
261 Descriptor d = typeUtil.getMain();
262 HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);
264 // for each allocation site check for sharing classes with
265 // other allocation sites in the context of execution
// outerChecked makes each unordered site pair tested exactly once
267 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
268 Iterator allocItr1 = allocSites.iterator();
269 while (allocItr1.hasNext()) {
270 AllocSite as1 = (AllocSite) allocItr1.next();
272 Iterator allocItr2 = allocSites.iterator();
273 while (allocItr2.hasNext()) {
274 AllocSite as2 = (AllocSite) allocItr2.next();
276 if (!outerChecked.contains(as2)) {
// call arguments (as1, as2) continue on elided lines 278-279
277 Set<HeapRegionNode> common = hasPotentialSharing(d,
280 if (!common.isEmpty()) {
281 foundSomeSharing = true;
282 bw.write("Potential sharing between "
283 + as1.getDisjointAnalysisId() + " and "
284 + as2.getDisjointAnalysisId() + ".\n");
285 bw.write(prettyPrintNodeSet(common) + "\n");
291 outerChecked.add(as1);
294 if (!foundSomeSharing) {
295 bw.write("No sharing classes between flagged objects found.\n");
297 bw.write("\nNumber sharing classes: "+numSharing);
300 bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
305 ///////////////////////////////////////////
307 // end public interface
309 ///////////////////////////////////////////
// Guard used by every public query: results are undefined while the
// fixed-point analysis is still running, so fail loudly instead.
311 protected void checkAnalysisComplete() {
312 if( !analysisComplete ) {
313 throw new Error("Warning: public interface method called while analysis is running.");
318 // run in faster mode, only when bugs wrung out!
319 public static boolean releaseMode;
321 // use command line option to set this, analysis
322 // should attempt to be deterministic
323 public static boolean determinismDesired;
325 // when we want to enforce determinism in the
326 // analysis we need to sort descriptors rather
327 // than toss them in efficient sets, use this
328 public static DescriptorComparator dComp =
329 new DescriptorComparator();
332 // data from the compiler
334 public CallGraph callGraph;
335 public Liveness liveness;
336 public ArrayReferencees arrayReferencees;
337 public RBlockRelationAnalysis rblockRel;
338 public RBlockStatusAnalysis rblockStatus;
339 public TypeUtil typeUtil;
340 public int allocationDepth;
// effects analysis is only enabled when an rblock relation is supplied
// (see init: rblockRel != null)
342 protected boolean doEffectsAnalysis = false;
343 protected EffectsAnalysis effectsAnalysis;
345 // data structure for public interface
346 private Hashtable< Descriptor, HashSet<AllocSite> >
347 mapDescriptorToAllocSiteSet;
350 // for public interface methods to warn that they
351 // are grabbing results during analysis
352 private boolean analysisComplete;
355 // used to identify HeapRegionNode objects
356 // A unique ID equates an object in one
357 // ownership graph with an object in another
358 // graph that logically represents the same
360 // start at 10 and increment to reserve some
361 // IDs for special purposes
362 static protected int uniqueIDcount = 10;
365 // An out-of-scope method created by the
366 // analysis that has no parameters, and
367 // appears to allocate the command line
368 // arguments, then invoke the source code's
369 // main method. The purpose of this is to
370 // provide the analysis with an explicit
371 // top-level context with no parameters
372 protected MethodDescriptor mdAnalysisEntry;
373 protected FlatMethod fmAnalysisEntry;
375 // main method defined by source program
376 protected MethodDescriptor mdSourceEntry;
378 // the set of task and/or method descriptors
379 // reachable in call graph
380 protected Set<Descriptor>
381 descriptorsToAnalyze;
383 // current descriptors to visit in fixed-point
384 // interprocedural analysis, prioritized by
385 // dependency in the call graph
386 protected Stack<Descriptor>
387 descriptorsToVisitStack;
// priority-queue alternative to the stack (DISJOINTDVISITPQUE mode)
388 protected PriorityQueue<DescriptorQWrapper>
391 // a duplication of the above structure, but
392 // for efficient testing of inclusion
393 protected HashSet<Descriptor>
394 descriptorsToVisitSet;
396 // storage for priorities (doesn't make sense)
397 // to add it to the Descriptor class, just in
399 protected Hashtable<Descriptor, Integer>
400 mapDescriptorToPriority;
402 // when analyzing a method and scheduling more:
403 // remember set of callee's enqueued for analysis
404 // so they can be put on top of the callers in
405 // the stack-visit mode
406 protected Set<Descriptor>
409 // maps a descriptor to its current partial result
410 // from the intraprocedural fixed-point analysis--
411 // then the interprocedural analysis settles, this
412 // mapping will have the final results for each
414 protected Hashtable<Descriptor, ReachGraph>
415 mapDescriptorToCompleteReachGraph;
417 // maps a descriptor to its known dependents: namely
418 // methods or tasks that call the descriptor's method
419 // AND are part of this analysis (reachable from main)
420 protected Hashtable< Descriptor, Set<Descriptor> >
421 mapDescriptorToSetDependents;
423 // if the analysis client wants to flag allocation sites
424 // programmatically, it should provide a set of FlatNew
425 // statements--this may be null if unneeded
426 protected Set<FlatNew> sitesToFlag;
428 // maps each flat new to one analysis abstraction
429 // allocate site object, these exist outside reach graphs
430 protected Hashtable<FlatNew, AllocSite>
431 mapFlatNewToAllocSite;
433 // maps intergraph heap region IDs to intergraph
434 // allocation sites that created them, a redundant
435 // structure for efficiency in some operations
436 protected Hashtable<Integer, AllocSite>
439 // maps a method to its initial heap model (IHM) that
440 // is the set of reachability graphs from every caller
441 // site, all merged together. The reason that we keep
442 // them separate is that any one call site's contribution
443 // to the IHM may changed along the path to the fixed point
444 protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
445 mapDescriptorToIHMcontributions;
447 // additionally, keep a mapping from descriptors to the
448 // merged in-coming initial context, because we want this
449 // initial context to be STRICTLY MONOTONIC
450 protected Hashtable<Descriptor, ReachGraph>
451 mapDescriptorToInitialContext;
453 // make the result for back edges analysis-wide STRICTLY
454 // MONOTONIC as well, but notice we use FlatNode as the
455 // key for this map: in case we want to consider other
456 // nodes as back edge's in future implementations
457 protected Hashtable<FlatNode, ReachGraph>
458 mapBackEdgeToMonotone;
// synthetic field name used to model array element references
461 public static final String arrayElementFieldName = "___element_";
462 static protected Hashtable<TypeDescriptor, FieldDescriptor>
465 // for controlling DOT file output
466 protected boolean writeFinalDOTs;
467 protected boolean writeAllIncrementalDOTs;
469 // supporting DOT output--when we want to write every
470 // partial method result, keep a tally for generating
472 protected Hashtable<Descriptor, Integer>
473 mapDescriptorToNumUpdates;
475 //map task descriptor to initial task parameter
476 protected Hashtable<Descriptor, ReachGraph>
477 mapDescriptorToReachGraph;
// control-flow helper; provides prev/next traversal of flat nodes
479 protected PointerMethod pm;
// NOTE(review): purpose of this static cache not evident in this
// extract — presumably flat-node -> graph memo; confirm against callers
481 static protected Hashtable<FlatNode, ReachGraph> fn2rg =
482 new Hashtable<FlatNode, ReachGraph>();
// maps a call site back to the descriptor whose body encloses it
484 private Hashtable<FlatCall, Descriptor> fc2enclosing;
487 // allocate various structures that are not local
488 // to a single class method--should be done once
// Allocates all analysis-wide containers; called once from init before
// the fixed-point computation starts. Container choice for
// descriptorsToAnalyze depends on the determinism flag.
489 protected void allocateStructures() {
491 if( determinismDesired ) {
492 // use an ordered set
493 descriptorsToAnalyze = new TreeSet<Descriptor>( dComp );
495 // otherwise use a speedy hashset
496 descriptorsToAnalyze = new HashSet<Descriptor>();
499 mapDescriptorToCompleteReachGraph =
500 new Hashtable<Descriptor, ReachGraph>();
502 mapDescriptorToNumUpdates =
503 new Hashtable<Descriptor, Integer>();
505 mapDescriptorToSetDependents =
506 new Hashtable< Descriptor, Set<Descriptor> >();
508 mapFlatNewToAllocSite =
509 new Hashtable<FlatNew, AllocSite>();
511 mapDescriptorToIHMcontributions =
512 new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();
514 mapDescriptorToInitialContext =
515 new Hashtable<Descriptor, ReachGraph>();
517 mapBackEdgeToMonotone =
518 new Hashtable<FlatNode, ReachGraph>();
520 mapHrnIdToAllocSite =
521 new Hashtable<Integer, AllocSite>();
523 mapTypeToArrayField =
524 new Hashtable <TypeDescriptor, FieldDescriptor>();
// only build the scheduling structure(s) the selected visit mode needs
526 if( state.DISJOINTDVISITSTACK ||
527 state.DISJOINTDVISITSTACKEESONTOP
529 descriptorsToVisitStack =
530 new Stack<Descriptor>();
533 if( state.DISJOINTDVISITPQUE ) {
534 descriptorsToVisitQ =
535 new PriorityQueue<DescriptorQWrapper>();
538 descriptorsToVisitSet =
539 new HashSet<Descriptor>();
541 mapDescriptorToPriority =
542 new Hashtable<Descriptor, Integer>();
// target of this allocation (presumably calleesToEnqueue) is on an
// elided line 544 — TODO confirm
545 new HashSet<Descriptor>();
547 mapDescriptorToAllocSiteSet =
548 new Hashtable<Descriptor, HashSet<AllocSite> >();
550 mapDescriptorToReachGraph =
551 new Hashtable<Descriptor, ReachGraph>();
553 pm = new PointerMethod();
555 fc2enclosing = new Hashtable<FlatCall, Descriptor>();
560 // this analysis generates a disjoint reachability
561 // graph for every reachable method in the program
// Public entry point: forwards all compiler inputs to init, which runs
// the whole analysis to completion. (Several parameter lines — TypeUtil,
// CallGraph, Liveness, ArrayReferencees — are elided in this extract.)
562 public DisjointAnalysis( State s,
567 Set<FlatNew> sitesToFlag,
568 RBlockRelationAnalysis rra,
569 RBlockStatusAnalysis rsa
571 init( s, tu, cg, l, ar, sitesToFlag, rra, rsa );
// Stores compiler inputs, configures static ReachGraph state, runs the
// interprocedural fixed point (analyzeMethods on an elided line), times
// it, and writes the requested report/DOT outputs. Effects analysis is
// enabled only when an RBlockRelationAnalysis is provided.
574 protected void init( State state,
578 ArrayReferencees arrayReferencees,
579 Set<FlatNew> sitesToFlag,
580 RBlockRelationAnalysis rra,
581 RBlockStatusAnalysis rsa
584 analysisComplete = false;
587 this.typeUtil = typeUtil;
588 this.callGraph = callGraph;
589 this.liveness = liveness;
590 this.arrayReferencees = arrayReferencees;
591 this.sitesToFlag = sitesToFlag;
592 this.rblockRel = rra;
593 this.rblockStatus = rsa;
595 if( rblockRel != null ) {
596 doEffectsAnalysis = true;
597 effectsAnalysis = new EffectsAnalysis();
600 this.allocationDepth = state.DISJOINTALLOCDEPTH;
601 this.releaseMode = state.DISJOINTRELEASEMODE;
602 this.determinismDesired = state.DISJOINTDETERMINISM;
// DOT output: final-only unless WRITEALL asks for every increment
604 this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL;
605 this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL;
// debug-snapshot configuration (captures graphs around a named method)
607 this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
608 this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
609 this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
610 this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
611 this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
612 this.snapVisitCounter = 1; // count visits from 1 (user will write 1, means 1st visit)
613 this.snapNodeCounter = 0; // count nodes from 0
// exactly one descriptor-visiting mode must be selected
616 state.DISJOINTDVISITSTACK ||
617 state.DISJOINTDVISITPQUE ||
618 state.DISJOINTDVISITSTACKEESONTOP;
619 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
620 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
621 assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);
623 // set some static configuration for ReachGraphs
624 ReachGraph.allocationDepth = allocationDepth;
625 ReachGraph.typeUtil = typeUtil;
627 ReachGraph.debugCallSiteVisitStartCapture
628 = state.DISJOINTDEBUGCALLVISITTOSTART;
630 ReachGraph.debugCallSiteNumVisitsToCapture
631 = state.DISJOINTDEBUGCALLNUMVISITS;
633 ReachGraph.debugCallSiteStopAfter
634 = state.DISJOINTDEBUGCALLSTOPAFTER;
636 ReachGraph.debugCallSiteVisitCounter
637 = 0; // count visits from 1, is incremented before first visit
641 allocateStructures();
643 double timeStartAnalysis = (double) System.nanoTime();
645 // start interprocedural fixed-point computation
// (the analyzeMethods() call sits on an elided line inside a try)
648 } catch( IOException e ) {
649 throw new Error( "IO Exception while writing disjointness analysis output." );
652 analysisComplete=true;
654 double timeEndAnalysis = (double) System.nanoTime();
655 double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow( 10.0, 9.0 ) );
656 String treport = String.format( "The reachability analysis took %.3f sec.", dt );
657 String justtime = String.format( "%.2f", dt );
658 System.out.println( treport );
661 if( writeFinalDOTs && !writeAllIncrementalDOTs ) {
665 if( state.DISJOINTWRITEIHMS ) {
669 if( state.DISJOINTWRITEINITCONTEXTS ) {
670 writeInitialContexts();
673 if( state.DISJOINTALIASFILE != null ) {
// task mode -> writeAllSharing; Java mode -> writeAllSharingJava
675 writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
677 writeAllSharingJava(state.DISJOINTALIASFILE,
680 state.DISJOINTALIASTAB,
685 } catch( IOException e ) {
686 throw new Error( "IO Exception while writing disjointness analysis output." );
689 if( doEffectsAnalysis ) {
690 effectsAnalysis.writeEffects( "effects.txt" );
// True while the scheduling structure for the active visit mode still
// holds descriptors; errors out if no mode flag is set.
695 protected boolean moreDescriptorsToVisit() {
696 if( state.DISJOINTDVISITSTACK ||
697 state.DISJOINTDVISITSTACKEESONTOP
699 return !descriptorsToVisitStack.isEmpty();
701 } else if( state.DISJOINTDVISITPQUE ) {
702 return !descriptorsToVisitQ.isEmpty();
705 throw new Error( "Neither descriptor visiting mode set" );
709 // fixed-point computation over the call graph--when a
710 // method's callees are updated, it must be reanalyzed
709 // fixed-point computation over the call graph--when a
710 // method's callees are updated, it must be reanalyzed
// Three phases: (1) collect reachable descriptors from task roots
// (Bamboo mode) or from main (Java mode); (2) seed the stack or priority
// queue per the visit mode; (3) pop/poll and re-analyze until no
// descriptor's complete graph changes.
711 protected void analyzeMethods() throws java.io.IOException {
713 // task or non-task (java) mode determines what the roots
714 // of the call chain are, and establishes the set of methods
715 // reachable from the roots that will be analyzed
718 System.out.println( "Bamboo mode..." );
720 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
721 while( taskItr.hasNext() ) {
722 TaskDescriptor td = (TaskDescriptor) taskItr.next();
723 if( !descriptorsToAnalyze.contains( td ) ) {
724 // add all methods transitively reachable from the
726 descriptorsToAnalyze.add( td );
727 descriptorsToAnalyze.addAll( callGraph.getAllMethods( td ) );
732 System.out.println( "Java mode..." );
734 // add all methods transitively reachable from the
735 // source's main to set for analysis
736 mdSourceEntry = typeUtil.getMain();
737 descriptorsToAnalyze.add( mdSourceEntry );
738 descriptorsToAnalyze.addAll( callGraph.getAllMethods( mdSourceEntry ) );
740 // fabricate an empty calling context that will call
741 // the source's main, but call graph doesn't know
742 // about it, so explicitly add it
743 makeAnalysisEntryMethod( mdSourceEntry );
744 descriptorsToAnalyze.add( mdAnalysisEntry );
748 // now, depending on the interprocedural mode for visiting
749 // methods, set up the needed data structures
751 if( state.DISJOINTDVISITPQUE ) {
753 // topologically sort according to the call graph so
754 // leaf calls are last, helps build contexts up first
755 LinkedList<Descriptor> sortedDescriptors =
756 topologicalSort( descriptorsToAnalyze );
758 // add sorted descriptors to priority queue, and duplicate
759 // the queue as a set for efficiently testing whether some
760 // method is marked for analysis
762 Iterator<Descriptor> dItr;
764 // for the priority queue, give items at the head
765 // of the sorted list a low number (highest priority)
// priority counter p is declared/incremented on elided lines
766 while( !sortedDescriptors.isEmpty() ) {
767 Descriptor d = sortedDescriptors.removeFirst();
768 mapDescriptorToPriority.put( d, new Integer( p ) );
769 descriptorsToVisitQ.add( new DescriptorQWrapper( p, d ) );
770 descriptorsToVisitSet.add( d );
774 } else if( state.DISJOINTDVISITSTACK ||
775 state.DISJOINTDVISITSTACKEESONTOP
777 // if we're doing the stack scheme, just throw the root
778 // method or tasks on the stack
780 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
781 while( taskItr.hasNext() ) {
782 TaskDescriptor td = (TaskDescriptor) taskItr.next();
783 descriptorsToVisitStack.add( td );
784 descriptorsToVisitSet.add( td );
// Java-mode branch: only the fabricated entry method is a root
788 descriptorsToVisitStack.add( mdAnalysisEntry );
789 descriptorsToVisitSet.add( mdAnalysisEntry );
793 throw new Error( "Unknown method scheduling mode" );
797 // analyze scheduled methods until there are no more to visit
798 while( moreDescriptorsToVisit() ) {
801 if( state.DISJOINTDVISITSTACK ||
802 state.DISJOINTDVISITSTACKEESONTOP
804 d = descriptorsToVisitStack.pop();
806 } else if( state.DISJOINTDVISITPQUE ) {
807 d = descriptorsToVisitQ.poll().getDescriptor();
// the membership set must mirror the stack/queue exactly
810 assert descriptorsToVisitSet.contains( d );
811 descriptorsToVisitSet.remove( d );
813 // because the task or method descriptor just extracted
814 // was in the "to visit" set it either hasn't been analyzed
815 // yet, or some method that it depends on has been
816 // updated. Recompute a complete reachability graph for
817 // this task/method and compare it to any previous result.
818 // If there is a change detected, add any methods/tasks
819 // that depend on this one to the "to visit" set.
821 System.out.println( "Analyzing " + d );
823 if( state.DISJOINTDVISITSTACKEESONTOP ) {
824 assert calleesToEnqueue.isEmpty();
827 ReachGraph rg = analyzeMethod( d );
828 ReachGraph rgPrev = getPartial( d );
830 if( !rg.equals( rgPrev ) ) {
// setPartial/enqueue bookkeeping sits on elided lines here
833 if( state.DISJOINTDEBUGSCHEDULING ) {
834 System.out.println( " complete graph changed, scheduling callers for analysis:" );
837 // results for d changed, so enqueue dependents
838 // of d for further analysis
839 Iterator<Descriptor> depsItr = getDependents( d ).iterator();
840 while( depsItr.hasNext() ) {
841 Descriptor dNext = depsItr.next();
844 if( state.DISJOINTDEBUGSCHEDULING ) {
845 System.out.println( " "+dNext );
850 // whether or not the method under analysis changed,
851 // we may have some callees that are scheduled for
852 // more analysis, and they should go on the top of
853 // the stack now (in other method-visiting modes they
854 // are already enqueued at this point
855 if( state.DISJOINTDVISITSTACKEESONTOP ) {
856 Iterator<Descriptor> depsItr = calleesToEnqueue.iterator();
857 while( depsItr.hasNext() ) {
858 Descriptor dNext = depsItr.next();
861 calleesToEnqueue.clear();
// Intraprocedural fixed point for one task/method: worklist over flat
// nodes, merging predecessor graphs, applying analyzeFlatNode as the
// transfer function, and finally merging all return-node graphs into the
// method's complete result. Determinism mode shadows the worklist set
// with a FIFO queue so visit order is repeatable.
867 protected ReachGraph analyzeMethod( Descriptor d )
868 throws java.io.IOException {
870 // get the flat code for this descriptor
872 if( d == mdAnalysisEntry ) {
873 fm = fmAnalysisEntry;
875 fm = state.getMethodFlat( d );
877 pm.analyzeMethod( fm );
879 // intraprocedural work set
880 Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
881 flatNodesToVisit.add( fm );
883 // if determinism is desired by client, shadow the
884 // set with a queue to make visit order deterministic
885 Queue<FlatNode> flatNodesToVisitQ = null;
886 if( determinismDesired ) {
887 flatNodesToVisitQ = new LinkedList<FlatNode>();
888 flatNodesToVisitQ.add( fm );
891 // mapping of current partial results
892 Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph =
893 new Hashtable<FlatNode, ReachGraph>();
895 // the set of return nodes partial results that will be combined as
896 // the final, conservative approximation of the entire method
897 HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();
899 while( !flatNodesToVisit.isEmpty() ) {
902 if( determinismDesired ) {
903 assert !flatNodesToVisitQ.isEmpty();
904 fn = flatNodesToVisitQ.remove();
906 fn = flatNodesToVisit.iterator().next();
908 flatNodesToVisit.remove( fn );
910 // effect transfer function defined by this node,
911 // then compare it to the old graph at this node
912 // to see if anything was updated.
914 ReachGraph rg = new ReachGraph();
915 TaskDescriptor taskDesc;
// a task's entry node starts from a cached/initial task graph rather
// than an empty one
916 if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null){
917 if(mapDescriptorToReachGraph.containsKey(taskDesc)){
918 // retrieve existing reach graph if it is not first time
919 rg=mapDescriptorToReachGraph.get(taskDesc);
921 // create initial reach graph for a task
922 rg=createInitialTaskReachGraph((FlatMethod)fn);
924 mapDescriptorToReachGraph.put(taskDesc, rg);
928 // start by merging all node's parents' graphs
929 for( int i = 0; i < pm.numPrev(fn); ++i ) {
930 FlatNode pn = pm.getPrev(fn,i);
931 if( mapFlatNodeToReachGraph.containsKey( pn ) ) {
932 ReachGraph rgParent = mapFlatNodeToReachGraph.get( pn );
933 rg.merge( rgParent );
938 if( takeDebugSnapshots &&
939 d.getSymbol().equals( descSymbolDebug )
941 debugSnapshot( rg, fn, true );
945 // modify rg with appropriate transfer function
946 rg = analyzeFlatNode( d, fm, fn, setReturns, rg );
949 if( takeDebugSnapshots &&
950 d.getSymbol().equals( descSymbolDebug )
952 debugSnapshot( rg, fn, false );
957 // if the results of the new graph are different from
958 // the current graph at this node, replace the graph
959 // with the update and enqueue the children
960 ReachGraph rgPrev = mapFlatNodeToReachGraph.get( fn );
961 if( !rg.equals( rgPrev ) ) {
962 mapFlatNodeToReachGraph.put( fn, rg );
964 for( int i = 0; i < pm.numNext( fn ); i++ ) {
965 FlatNode nn = pm.getNext( fn, i );
967 flatNodesToVisit.add( nn );
968 if( determinismDesired ) {
969 flatNodesToVisitQ.add( nn );
976 // end by merging all return nodes into a complete
977 // reach graph that represents all possible heap
978 // states after the flat method returns
979 ReachGraph completeGraph = new ReachGraph();
981 assert !setReturns.isEmpty();
982 Iterator retItr = setReturns.iterator();
983 while( retItr.hasNext() ) {
984 FlatReturnNode frn = (FlatReturnNode) retItr.next();
986 assert mapFlatNodeToReachGraph.containsKey( frn );
987 ReachGraph rgRet = mapFlatNodeToReachGraph.get( frn );
989 completeGraph.merge( rgRet );
993 if( takeDebugSnapshots &&
994 d.getSymbol().equals( descSymbolDebug )
996 // increment that we've visited the debug snap
997 // method, and reset the node counter
998 System.out.println( " @@@ debug snap at visit "+snapVisitCounter );
1000 snapNodeCounter = 0;
1002 if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&
1005 System.out.println( "!!! Stopping analysis after debug snap captures. !!!" );
1011 return completeGraph;
1015 protected ReachGraph
1016 analyzeFlatNode( Descriptor d,
1017 FlatMethod fmContaining,
1019 HashSet<FlatReturnNode> setRetNodes,
1021 ) throws java.io.IOException {
1024 // any variables that are no longer live should be
1025 // nullified in the graph to reduce edges
1026 //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );
1030 FieldDescriptor fld;
1031 TypeDescriptor tdElement;
1032 FieldDescriptor fdElement;
1033 FlatSESEEnterNode sese;
1034 FlatSESEExitNode fsexn;
1036 // use node type to decide what transfer function
1037 // to apply to the reachability graph
1038 switch( fn.kind() ) {
1040 case FKind.FlatMethod: {
1041 // construct this method's initial heap model (IHM)
1042 // since we're working on the FlatMethod, we know
1043 // the incoming ReachGraph 'rg' is empty
1045 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1046 getIHMcontributions( d );
1048 Set entrySet = heapsFromCallers.entrySet();
1049 Iterator itr = entrySet.iterator();
1050 while( itr.hasNext() ) {
1051 Map.Entry me = (Map.Entry) itr.next();
1052 FlatCall fc = (FlatCall) me.getKey();
1053 ReachGraph rgContrib = (ReachGraph) me.getValue();
1055 assert fc.getMethod().equals( d );
1057 rg.merge( rgContrib );
1060 // additionally, we are enforcing STRICT MONOTONICITY for the
1061 // method's initial context, so grow the context by whatever
1062 // the previously computed context was, and put the most
1063 // up-to-date context back in the map
1064 ReachGraph rgPrevContext = mapDescriptorToInitialContext.get( d );
1065 rg.merge( rgPrevContext );
1066 mapDescriptorToInitialContext.put( d, rg );
1070 case FKind.FlatOpNode:
1071 FlatOpNode fon = (FlatOpNode) fn;
1072 if( fon.getOp().getOp() == Operation.ASSIGN ) {
1073 lhs = fon.getDest();
1074 rhs = fon.getLeft();
1076 // before transfer, do effects analysis support
1077 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1078 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1079 // x gets status of y
1080 if(!rg.isAccessible(rhs)){
1081 rg.makeInaccessible(lhs);
1087 rg.assignTempXEqualToTempY( lhs, rhs );
1091 case FKind.FlatCastNode:
1092 FlatCastNode fcn = (FlatCastNode) fn;
1096 TypeDescriptor td = fcn.getType();
1099 // before transfer, do effects analysis support
1100 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1101 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1102 // x gets status of y
1103 if(!rg.isAccessible(rhs)){
1104 rg.makeInaccessible(lhs);
1110 rg.assignTempXEqualToCastedTempY( lhs, rhs, td );
1113 case FKind.FlatFieldNode:
1114 FlatFieldNode ffn = (FlatFieldNode) fn;
1118 fld = ffn.getField();
1120 // before graph transform, possible inject
1121 // a stall-site taint
1122 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1124 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1125 // x=y.f, stall y if not accessible
1126 // contributes read effects on stall site of y
1127 if(!rg.isAccessible(rhs)) {
1128 rg.taintStallSite(fn, rhs);
1131 // after this, x and y are accessbile.
1132 rg.makeAccessible(lhs);
1133 rg.makeAccessible(rhs);
1137 if( shouldAnalysisTrack( fld.getType() ) ) {
1139 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fld );
1142 // after transfer, use updated graph to
1143 // do effects analysis
1144 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1145 effectsAnalysis.analyzeFlatFieldNode( rg, rhs, fld );
1149 case FKind.FlatSetFieldNode:
1150 FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;
1152 lhs = fsfn.getDst();
1153 fld = fsfn.getField();
1154 rhs = fsfn.getSrc();
1156 boolean strongUpdate = false;
1158 // before transfer func, possibly inject
1159 // stall-site taints
1160 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1162 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1163 // x.y=f , stall x and y if they are not accessible
1164 // also contribute write effects on stall site of x
1165 if(!rg.isAccessible(lhs)) {
1166 rg.taintStallSite(fn, lhs);
1169 if(!rg.isAccessible(rhs)) {
1170 rg.taintStallSite(fn, rhs);
1173 // accessible status update
1174 rg.makeAccessible(lhs);
1175 rg.makeAccessible(rhs);
1179 if( shouldAnalysisTrack( fld.getType() ) ) {
1181 strongUpdate = rg.assignTempXFieldFEqualToTempY( lhs, fld, rhs );
1184 // use transformed graph to do effects analysis
1185 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1186 effectsAnalysis.analyzeFlatSetFieldNode( rg, lhs, fld, strongUpdate );
1190 case FKind.FlatElementNode:
1191 FlatElementNode fen = (FlatElementNode) fn;
1196 assert rhs.getType() != null;
1197 assert rhs.getType().isArray();
1199 tdElement = rhs.getType().dereference();
1200 fdElement = getArrayField( tdElement );
1202 // before transfer func, possibly inject
1204 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1206 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1207 // x=y.f, stall y if not accessible
1208 // contributes read effects on stall site of y
1209 // after this, x and y are accessbile.
1210 if(!rg.isAccessible(rhs)) {
1211 rg.taintStallSite(fn, rhs);
1214 rg.makeAccessible(lhs);
1215 rg.makeAccessible(rhs);
1219 if( shouldAnalysisTrack( lhs.getType() ) ) {
1221 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fdElement );
1224 // use transformed graph to do effects analysis
1225 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1226 effectsAnalysis.analyzeFlatFieldNode( rg, rhs, fdElement );
1230 case FKind.FlatSetElementNode:
1231 FlatSetElementNode fsen = (FlatSetElementNode) fn;
1233 lhs = fsen.getDst();
1234 rhs = fsen.getSrc();
1236 assert lhs.getType() != null;
1237 assert lhs.getType().isArray();
1239 tdElement = lhs.getType().dereference();
1240 fdElement = getArrayField( tdElement );
1242 // before transfer func, possibly inject
1243 // stall-site taints
1244 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1246 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1247 // x.y=f , stall x and y if they are not accessible
1248 // also contribute write effects on stall site of x
1249 if(!rg.isAccessible(lhs)) {
1250 rg.taintStallSite(fn, lhs);
1253 if(!rg.isAccessible(rhs)) {
1254 rg.taintStallSite(fn, rhs);
1257 // accessible status update
1258 rg.makeAccessible(lhs);
1259 rg.makeAccessible(rhs);
1263 if( shouldAnalysisTrack( rhs.getType() ) ) {
1264 // transfer func, BUT
1265 // skip this node if it cannot create new reachability paths
1266 if( !arrayReferencees.doesNotCreateNewReaching( fsen ) ) {
1267 rg.assignTempXFieldFEqualToTempY( lhs, fdElement, rhs );
1271 // use transformed graph to do effects analysis
1272 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1273 effectsAnalysis.analyzeFlatSetFieldNode( rg, lhs, fdElement,
1279 FlatNew fnn = (FlatNew) fn;
1281 if( shouldAnalysisTrack( lhs.getType() ) ) {
1282 AllocSite as = getAllocSiteFromFlatNewPRIVATE( fnn );
1284 // before transform, support effects analysis
1285 if (doEffectsAnalysis && fmContaining != fmAnalysisEntry) {
1286 if (rblockStatus.isInCriticalRegion(fmContaining, fn)) {
1287 // after creating new object, lhs is accessible
1288 rg.makeAccessible(lhs);
1293 rg.assignTempEqualToNewAlloc( lhs, as );
1297 case FKind.FlatSESEEnterNode:
1298 sese = (FlatSESEEnterNode) fn;
1300 if( sese.getIsCallerSESEplaceholder() ) {
1301 // ignore these dummy rblocks!
1305 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1307 // always remove ALL stall site taints at enter
1308 rg.removeAllStallSiteTaints();
1310 // inject taints for in-set vars
1311 rg.taintInSetVars( sese );
1315 case FKind.FlatSESEExitNode:
1316 fsexn = (FlatSESEExitNode) fn;
1317 sese = fsexn.getFlatEnter();
1319 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1321 // @ sese exit make all live variables
1322 // inaccessible to later parent statements
1323 rg.makeInaccessible( liveness.getLiveInTemps( fmContaining, fn ) );
1325 // always remove ALL stall site taints at exit
1326 rg.removeAllStallSiteTaints();
1328 // remove in-set var taints for the exiting rblock
1329 rg.removeInContextTaints( sese );
1334 case FKind.FlatCall: {
1335 Descriptor mdCaller;
1336 if( fmContaining.getMethod() != null ){
1337 mdCaller = fmContaining.getMethod();
1339 mdCaller = fmContaining.getTask();
1341 FlatCall fc = (FlatCall) fn;
1342 MethodDescriptor mdCallee = fc.getMethod();
1343 FlatMethod fmCallee = state.getMethodFlat( mdCallee );
1346 boolean debugCallSite =
1347 mdCaller.getSymbol().equals( state.DISJOINTDEBUGCALLER ) &&
1348 mdCallee.getSymbol().equals( state.DISJOINTDEBUGCALLEE );
1350 boolean writeDebugDOTs = false;
1351 boolean stopAfter = false;
1352 if( debugCallSite ) {
1353 ++ReachGraph.debugCallSiteVisitCounter;
1354 System.out.println( " $$$ Debug call site visit "+
1355 ReachGraph.debugCallSiteVisitCounter+
1359 (ReachGraph.debugCallSiteVisitCounter >=
1360 ReachGraph.debugCallSiteVisitStartCapture) &&
1362 (ReachGraph.debugCallSiteVisitCounter <
1363 ReachGraph.debugCallSiteVisitStartCapture +
1364 ReachGraph.debugCallSiteNumVisitsToCapture)
1366 writeDebugDOTs = true;
1367 System.out.println( " $$$ Capturing this call site visit $$$" );
1368 if( ReachGraph.debugCallSiteStopAfter &&
1369 (ReachGraph.debugCallSiteVisitCounter ==
1370 ReachGraph.debugCallSiteVisitStartCapture +
1371 ReachGraph.debugCallSiteNumVisitsToCapture - 1)
1379 // calculate the heap this call site can reach--note this is
1380 // not used for the current call site transform, we are
1381 // grabbing this heap model for future analysis of the callees,
1382 // so if different results emerge we will return to this site
1383 ReachGraph heapForThisCall_old =
1384 getIHMcontribution( mdCallee, fc );
1386 // the computation of the callee-reachable heap
1387 // is useful for making the callee starting point
1388 // and for applying the call site transfer function
1389 Set<Integer> callerNodeIDsCopiedToCallee =
1390 new HashSet<Integer>();
1392 ReachGraph heapForThisCall_cur =
1393 rg.makeCalleeView( fc,
1395 callerNodeIDsCopiedToCallee,
1399 if( !heapForThisCall_cur.equals( heapForThisCall_old ) ) {
1400 // if heap at call site changed, update the contribution,
1401 // and reschedule the callee for analysis
1402 addIHMcontribution( mdCallee, fc, heapForThisCall_cur );
1404 // map a FlatCall to its enclosing method/task descriptor
1405 // so we can write that info out later
1406 fc2enclosing.put( fc, mdCaller );
1408 if( state.DISJOINTDEBUGSCHEDULING ) {
1409 System.out.println( " context changed, scheduling callee: "+mdCallee );
1412 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1413 calleesToEnqueue.add( mdCallee );
1415 enqueue( mdCallee );
1420 // the transformation for a call site should update the
1421 // current heap abstraction with any effects from the callee,
1422 // or if the method is virtual, the effects from any possible
1423 // callees, so find the set of callees...
1424 Set<MethodDescriptor> setPossibleCallees;
1425 if( determinismDesired ) {
1426 // use an ordered set
1427 setPossibleCallees = new TreeSet<MethodDescriptor>( dComp );
1429 // otherwise use a speedy hashset
1430 setPossibleCallees = new HashSet<MethodDescriptor>();
1433 if( mdCallee.isStatic() ) {
1434 setPossibleCallees.add( mdCallee );
1436 TypeDescriptor typeDesc = fc.getThis().getType();
1437 setPossibleCallees.addAll( callGraph.getMethods( mdCallee,
1442 ReachGraph rgMergeOfPossibleCallers = new ReachGraph();
1444 Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
1445 while( mdItr.hasNext() ) {
1446 MethodDescriptor mdPossible = mdItr.next();
1447 FlatMethod fmPossible = state.getMethodFlat( mdPossible );
1449 addDependent( mdPossible, // callee
1452 // don't alter the working graph (rg) until we compute a
1453 // result for every possible callee, merge them all together,
1454 // then set rg to that
1455 ReachGraph rgPossibleCaller = new ReachGraph();
1456 rgPossibleCaller.merge( rg );
1458 ReachGraph rgPossibleCallee = getPartial( mdPossible );
1460 if( rgPossibleCallee == null ) {
1461 // if this method has never been analyzed just schedule it
1462 // for analysis and skip over this call site for now
1463 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1464 calleesToEnqueue.add( mdPossible );
1466 enqueue( mdPossible );
1469 if( state.DISJOINTDEBUGSCHEDULING ) {
1470 System.out.println( " callee hasn't been analyzed, scheduling: "+mdPossible );
1475 // calculate the method call transform
1476 rgPossibleCaller.resolveMethodCall( fc,
1479 callerNodeIDsCopiedToCallee,
1483 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1484 if( !rgPossibleCallee.isAccessible( ReachGraph.tdReturn ) ) {
1485 rgPossibleCaller.makeInaccessible( fc.getReturnTemp() );
1491 rgMergeOfPossibleCallers.merge( rgPossibleCaller );
1496 System.out.println( "$$$ Exiting after requested captures of call site. $$$" );
1501 // now that we've taken care of building heap models for
1502 // callee analysis, finish this transformation
1503 rg = rgMergeOfPossibleCallers;
1507 case FKind.FlatReturnNode:
1508 FlatReturnNode frn = (FlatReturnNode) fn;
1509 rhs = frn.getReturnTemp();
1511 // before transfer, do effects analysis support
1512 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1513 if(!rg.isAccessible(rhs)){
1514 rg.makeInaccessible(ReachGraph.tdReturn);
1518 if( rhs != null && shouldAnalysisTrack( rhs.getType() ) ) {
1519 rg.assignReturnEqualToTemp( rhs );
1522 setRetNodes.add( frn );
1528 // dead variables were removed before the above transfer function
1529 // was applied, so eliminate heap regions and edges that are no
1530 // longer part of the abstractly-live heap graph, and sweep up
1531 // and reachability effects that are altered by the reduction
1532 //rg.abstractGarbageCollect();
1536 // back edges are strictly monotonic
1537 if( pm.isBackEdge( fn ) ) {
1538 ReachGraph rgPrevResult = mapBackEdgeToMonotone.get( fn );
1539 rg.merge( rgPrevResult );
1540 mapBackEdgeToMonotone.put( fn, rg );
1543 // at this point rg should be the correct update
1544 // by an above transfer function, or untouched if
1545 // the flat node type doesn't affect the heap
1551 // this method should generate integers strictly greater than zero!
1552 // special "shadow" regions are made from a heap region by negating
// the heap region's ID, so zero/negative IDs are reserved for shadows.
// Returns a fresh, process-wide unique heap region node ID.
// NOTE(review): the increment of uniqueIDcount is not visible in this
// elided view -- confirm the counter advances before each return.
1554 static public Integer generateUniqueHeapRegionNodeID() {
1556 return new Integer( uniqueIDcount );
// Lazily builds (and caches in mapTypeToArrayField) a synthetic public
// FieldDescriptor used to model the element slot of an array whose
// element type is tdElement.  All array reads/writes of that element
// type share this one fabricated field.
1561 static public FieldDescriptor getArrayField( TypeDescriptor tdElement ) {
1562 FieldDescriptor fdElement = mapTypeToArrayField.get( tdElement );
1563 if( fdElement == null ) {
// cache miss: fabricate the descriptor once and remember it
1564 fdElement = new FieldDescriptor( new Modifiers( Modifiers.PUBLIC ),
1566 arrayElementFieldName,
1569 mapTypeToArrayField.put( tdElement, fdElement );
// Dumps the final (complete) reachability graph of every analyzed
// descriptor to a DOT file named "COMPLETE<descriptor>".
1576 private void writeFinalGraphs() {
1577 Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
1578 Iterator itr = entrySet.iterator();
1579 while( itr.hasNext() ) {
1580 Map.Entry me = (Map.Entry) itr.next();
1581 Descriptor d = (Descriptor) me.getKey();
1582 ReachGraph rg = (ReachGraph) me.getValue();
// boolean flags select how much detail appears in the DOT output
1584 rg.writeGraph( "COMPLETE"+d,
1585 true, // write labels (variables)
1586 true, // selectively hide intermediate temp vars
1587 true, // prune unreachable heap regions
1588 false, // hide reachability altogether
1589 true, // hide subset reachability states
1590 true, // hide predicates
1591 false ); // hide edge taints
// Dumps every per-call-site Initial Heap Model (IHM) contribution to a
// DOT file, one graph per (callee descriptor, FlatCall) pair; the file
// name records both the callee and the enclosing caller (fc2enclosing).
1595 private void writeFinalIHMs() {
1596 Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
1597 while( d2IHMsItr.hasNext() ) {
1598 Map.Entry me1 = (Map.Entry) d2IHMsItr.next();
1599 Descriptor d = (Descriptor) me1.getKey();
1600 Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>) me1.getValue();
// inner loop: each call site that contributed a heap model for d
1602 Iterator fc2rgItr = IHMs.entrySet().iterator();
1603 while( fc2rgItr.hasNext() ) {
1604 Map.Entry me2 = (Map.Entry) fc2rgItr.next();
1605 FlatCall fc = (FlatCall) me2.getKey();
1606 ReachGraph rg = (ReachGraph) me2.getValue();
// NOTE(review): the labels on args 3-4 here disagree with the ordering
// used in writeFinalGraphs/writeInitialContexts (prune-unreachable vs
// hide-reachability swapped) -- verify against writeGraph's signature.
1608 rg.writeGraph( "IHMPARTFOR"+d+"FROM"+fc2enclosing.get( fc )+fc,
1609 true, // write labels (variables)
1610 true, // selectively hide intermediate temp vars
1611 true, // hide reachability altogether
1612 true, // prune unreachable heap regions
1613 true, // hide subset reachability states
1614 false, // hide predicates
1615 true ); // hide edge taints
// Dumps the initial analysis context of every descriptor to a DOT file
// named "INITIAL<descriptor>" (same flag layout as writeFinalGraphs).
1620 private void writeInitialContexts() {
1621 Set entrySet = mapDescriptorToInitialContext.entrySet();
1622 Iterator itr = entrySet.iterator();
1623 while( itr.hasNext() ) {
1624 Map.Entry me = (Map.Entry) itr.next();
1625 Descriptor d = (Descriptor) me.getKey();
1626 ReachGraph rg = (ReachGraph) me.getValue();
1628 rg.writeGraph( "INITIAL"+d,
1629 true, // write labels (variables)
1630 true, // selectively hide intermediate temp vars
1631 true, // prune unreachable heap regions
1632 false, // hide all reachability
1633 true, // hide subset reachability states
1634 true, // hide predicates
1635 false );// hide edge taints
// Returns the most recent partial (fixed-point-in-progress) result for
// descriptor d, or null if d has not been analyzed yet.
1640 protected ReachGraph getPartial( Descriptor d ) {
1641 return mapDescriptorToCompleteReachGraph.get( d );
// Records rg as the current partial analysis result for d; optionally
// writes each successive partial result to its own DOT file.
1644 protected void setPartial( Descriptor d, ReachGraph rg ) {
1645 mapDescriptorToCompleteReachGraph.put( d, rg );
1647 // when the flag for writing out every partial
1648 // result is set, we should spit out the graph,
1649 // but in order to give it a unique name we need
1650 // to track how many partial results for this
1651 // descriptor we've already written out
1652 if( writeAllIncrementalDOTs ) {
1653 if( !mapDescriptorToNumUpdates.containsKey( d ) ) {
1654 mapDescriptorToNumUpdates.put( d, new Integer( 0 ) );
1656 Integer n = mapDescriptorToNumUpdates.get( d );
// zero-padded counter keeps the DOT files sorted by update order
1658 rg.writeGraph( d+"COMPLETE"+String.format( "%05d", n ),
1659 true, // write labels (variables)
1660 true, // selectively hide intermediate temp vars
1661 true, // prune unreachable heap regions
1662 false, // hide all reachability
1663 true, // hide subset reachability states
1664 false, // hide predicates
1665 false); // hide edge taints
// bump the per-descriptor update counter for the next snapshot
1667 mapDescriptorToNumUpdates.put( d, n + 1 );
1673 // return just the allocation site associated with one FlatNew node
// Lazily creates the AllocSite for fnew (flagged if fnew is in
// sitesToFlag), assigns a unique heap-region ID to each of the
// allocationDepth "age" slots plus one summary slot, and caches the
// result in mapFlatNewToAllocSite for all later lookups.
1674 protected AllocSite getAllocSiteFromFlatNewPRIVATE( FlatNew fnew ) {
1676 boolean flagProgrammatically = false;
1677 if( sitesToFlag != null && sitesToFlag.contains( fnew ) ) {
1678 flagProgrammatically = true;
1681 if( !mapFlatNewToAllocSite.containsKey( fnew ) ) {
1682 AllocSite as = AllocSite.factory( allocationDepth,
1684 fnew.getDisjointId(),
1685 flagProgrammatically
1688 // the newest nodes are single objects
1689 for( int i = 0; i < allocationDepth; ++i ) {
1690 Integer id = generateUniqueHeapRegionNodeID();
1691 as.setIthOldest( i, id );
// remember reverse mapping so IDs can be resolved back to their site
1692 mapHrnIdToAllocSite.put( id, as );
1695 // the oldest node is a summary node
1696 as.setSummary( generateUniqueHeapRegionNodeID() );
1698 mapFlatNewToAllocSite.put( fnew, as );
1701 return mapFlatNewToAllocSite.get( fnew );
// Decides whether this analysis models values of the given type:
// heap-allocated objects and arrays are tracked, scalar primitives
// are not.
1705 public static boolean shouldAnalysisTrack( TypeDescriptor type ) {
1706 // don't track primitive types, but an array
1707 // of primitives is heap memory
1708 if( type.isImmutable() ) {
1709 return type.isArray();
1712 // everything else is an object
// Number of descriptors (methods/tasks) scheduled for analysis.
1716 protected int numMethodsAnalyzed() {
1717 return descriptorsToAnalyze.size();
1724 // Take in source entry which is the program's compiled entry and
1725 // create a new analysis entry, a method that takes no parameters
1726 // and appears to allocate the command line arguments and call the
1727 // source entry with them. The purpose of this analysis entry is
1728 // to provide a top-level method context with no parameters left.
// Side effects: sets this.mdAnalysisEntry and this.fmAnalysisEntry.
1729 protected void makeAnalysisEntryMethod( MethodDescriptor mdSourceEntry ) {
// fabricated entry is "public static void analysisEntryMethod()"
1731 Modifiers mods = new Modifiers();
1732 mods.addModifier( Modifiers.PUBLIC );
1733 mods.addModifier( Modifiers.STATIC );
1735 TypeDescriptor returnType =
1736 new TypeDescriptor( TypeDescriptor.VOID );
1738 this.mdAnalysisEntry =
1739 new MethodDescriptor( mods,
1741 "analysisEntryMethod"
// a synthetic temp that stands in for the command-line args array,
// typed after the source entry's first (String[]) parameter
1744 TempDescriptor cmdLineArgs =
1745 new TempDescriptor( "args",
1746 mdSourceEntry.getParamType( 0 )
1750 new FlatNew( mdSourceEntry.getParamType( 0 ),
// call the real program entry with the fabricated args
1755 TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
1756 sourceEntryArgs[0] = cmdLineArgs;
1759 new FlatCall( mdSourceEntry,
1765 FlatReturnNode frn = new FlatReturnNode( null );
1767 FlatExit fe = new FlatExit();
// stitch the fabricated IR nodes into a FlatMethod for the analysis
1769 this.fmAnalysisEntry =
1770 new FlatMethod( mdAnalysisEntry,
1774 this.fmAnalysisEntry.addNext( fn );
// DFS-based topological sort of the given descriptors along the call
// graph (callees before callers; see dfsVisit).  When determinism is
// requested, an ordered TreeSet makes iteration order reproducible.
1781 protected LinkedList<Descriptor> topologicalSort( Set<Descriptor> toSort ) {
1783 Set<Descriptor> discovered;
1785 if( determinismDesired ) {
1786 // use an ordered set
1787 discovered = new TreeSet<Descriptor>( dComp );
1789 // otherwise use a speedy hashset
1790 discovered = new HashSet<Descriptor>();
1793 LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();
1795 Iterator<Descriptor> itr = toSort.iterator();
1796 while( itr.hasNext() ) {
1797 Descriptor d = itr.next();
// start a DFS from every not-yet-discovered root
1799 if( !discovered.contains( d ) ) {
1800 dfsVisit( d, toSort, sorted, discovered );
1807 // While we're doing DFS on call graph, remember
1808 // dependencies for efficient queuing of methods
1809 // during interprocedural analysis:
1811 // a dependent of a method decriptor d for this analysis is:
1812 // 1) a method or task that invokes d
1813 // 2) in the descriptorsToAnalyze set
// Recursive DFS helper for topologicalSort: visits d's callers first,
// records caller->callee dependency edges via addDependent, then
// appends d, so "sorted" ends up callee-before-caller.
1814 protected void dfsVisit( Descriptor d,
1815 Set <Descriptor> toSort,
1816 LinkedList<Descriptor> sorted,
1817 Set <Descriptor> discovered ) {
1818 discovered.add( d );
1820 // only methods have callers, tasks never do
1821 if( d instanceof MethodDescriptor ) {
1823 MethodDescriptor md = (MethodDescriptor) d;
1825 // the call graph is not aware that we have a fabricated
1826 // analysis entry that calls the program source's entry
1827 if( md == mdSourceEntry ) {
1828 if( !discovered.contains( mdAnalysisEntry ) ) {
1829 addDependent( mdSourceEntry, // callee
1830 mdAnalysisEntry // caller
1832 dfsVisit( mdAnalysisEntry, toSort, sorted, discovered );
1836 // otherwise call graph guides DFS
1837 Iterator itr = callGraph.getCallerSet( md ).iterator();
1838 while( itr.hasNext() ) {
1839 Descriptor dCaller = (Descriptor) itr.next();
1841 // only consider callers in the original set to analyze
1842 if( !toSort.contains( dCaller ) ) {
1846 if( !discovered.contains( dCaller ) ) {
1847 addDependent( md, // callee
1851 dfsVisit( dCaller, toSort, sorted, discovered );
1856 // for leaf-nodes last now!
1857 sorted.addLast( d );
// Schedules d for (re)analysis exactly once: the membership set
// guards against duplicates, and the compiler flags choose between a
// stack discipline and a priority queue for visit order.
1861 protected void enqueue( Descriptor d ) {
1863 if( !descriptorsToVisitSet.contains( d ) ) {
1865 if( state.DISJOINTDVISITSTACK ||
1866 state.DISJOINTDVISITSTACKEESONTOP
1868 descriptorsToVisitStack.add( d );
1870 } else if( state.DISJOINTDVISITPQUE ) {
// priority was precomputed per descriptor (mapDescriptorToPriority)
1871 Integer priority = mapDescriptorToPriority.get( d );
1872 descriptorsToVisitQ.add( new DescriptorQWrapper( priority,
// mark as pending so a later enqueue of the same d is a no-op
1877 descriptorsToVisitSet.add( d );
1882 // a dependent of a method decriptor d for this analysis is:
1883 // 1) a method or task that invokes d
1884 // 2) in the descriptorsToAnalyze set
// Records caller as a dependent of callee so callers can be
// rescheduled when the callee's analysis result changes.
1885 protected void addDependent( Descriptor callee, Descriptor caller ) {
1886 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
1887 if( deps == null ) {
// first dependent for this callee: create the set lazily
1888 deps = new HashSet<Descriptor>();
1891 mapDescriptorToSetDependents.put( callee, deps );
// Returns the (possibly empty, never null) set of descriptors that
// depend on callee, creating and caching an empty set on first use.
1894 protected Set<Descriptor> getDependents( Descriptor callee ) {
1895 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
1896 if( deps == null ) {
1897 deps = new HashSet<Descriptor>();
1898 mapDescriptorToSetDependents.put( callee, deps );
// Returns the table of per-call-site initial heap model (IHM)
// contributions for d, lazily creating an empty table the first time
// (never returns null).
1904 public Hashtable<FlatCall, ReachGraph> getIHMcontributions( Descriptor d ) {
1906 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1907 mapDescriptorToIHMcontributions.get( d );
1909 if( heapsFromCallers == null ) {
1910 heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
1911 mapDescriptorToIHMcontributions.put( d, heapsFromCallers );
1914 return heapsFromCallers;
// Returns the single IHM contribution that call site fc made to
// callee d; the containsKey guard handles the no-contribution case
// (the early-exit branch is elided from this view).
1917 public ReachGraph getIHMcontribution( Descriptor d,
1920 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1921 getIHMcontributions( d );
1923 if( !heapsFromCallers.containsKey( fc ) ) {
1927 return heapsFromCallers.get( fc );
// Stores (or overwrites) rg as the IHM contribution of call site fc
// to callee d.
1931 public void addIHMcontribution( Descriptor d,
1935 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1936 getIHMcontributions( d );
1938 heapsFromCallers.put( fc, rg );
// Fabricates an allocation site that models a method/task parameter's
// object: builds a synthetic FlatNew for the temp, then an AllocSite
// whose age slots and summary node each get a fresh heap-region ID
// registered in mapHrnIdToAllocSite.  The two FlatNew branches differ
// only in whether a disjoint-site ID string is attached (the guard
// condition between them is elided from this view).
1942 private AllocSite createParameterAllocSite( ReachGraph rg,
1943 TempDescriptor tempDesc,
1949 flatNew = new FlatNew( tempDesc.getType(), // type
1950 tempDesc, // param temp
1951 false, // global alloc?
1952 "param"+tempDesc // disjoint site ID string
1955 flatNew = new FlatNew( tempDesc.getType(), // type
1956 tempDesc, // param temp
1957 false, // global alloc?
1958 null // disjoint site ID string
1962 // create allocation site
1963 AllocSite as = AllocSite.factory( allocationDepth,
1965 flatNew.getDisjointId(),
// assign a unique heap region ID to each age slot of the site
1968 for (int i = 0; i < allocationDepth; ++i) {
1969 Integer id = generateUniqueHeapRegionNodeID();
1970 as.setIthOldest(i, id);
1971 mapHrnIdToAllocSite.put(id, as);
1973 // the oldest node is a summary node
1974 as.setSummary( generateUniqueHeapRegionNodeID() );
// Collects the fields of typeDesc's class whose types this analysis
// tracks (see shouldAnalysisTrack); returns an empty set for
// immutable/primitive types.
1982 private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc){
1984 Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
1985 if(!typeDesc.isImmutable()){
1986 ClassDescriptor classDesc = typeDesc.getClassDesc();
1987 for (Iterator it = classDesc.getFields(); it.hasNext();) {
1988 FieldDescriptor field = (FieldDescriptor) it.next();
1989 TypeDescriptor fieldType = field.getType();
1990 if (shouldAnalysisTrack( fieldType )) {
1991 fieldSet.add(field);
// Models a multi-dimensional array field in the initial task reach
// graph: builds (or reuses, via mapToExistingNode) one summary heap
// region node per array dimension, chains them with element-field
// edges from srcHRN down to the innermost dimension, and finally adds
// an element node when the element class has tracked fields.  Records
// entry-node -> innermost-node in "map" and returns the entry node.
// NOTE(review): several argument lines of the RefEdge/createNewHeapRegionNode
// calls are elided from this view; comments below describe only what is visible.
1999 private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha ){
2001 int dimCount=fd.getType().getArrayCount();
2002 HeapRegionNode prevNode=null;
2003 HeapRegionNode arrayEntryNode=null;
// walk dimensions outermost-to-innermost (i is the remaining dim count)
2004 for(int i=dimCount;i>0;i--){
2005 TypeDescriptor typeDesc=fd.getType().dereference();//hack to get instance of type desc
2006 typeDesc.setArrayCount(i);
2007 TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
2008 HeapRegionNode hrnSummary ;
2009 if(!mapToExistingNode.containsKey(typeDesc)){
// no summary node for this dimension's type yet: make one
2014 as = createParameterAllocSite(rg, tempDesc, false);
2016 // make a new reference to allocated node
2018 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2019 false, // single object?
2021 false, // out-of-context?
2022 as.getType(), // type
2023 as, // allocation site
2024 alpha, // inherent reach
2025 alpha, // current reach
2026 ExistPredSet.factory(rg.predTrue), // predicates
2027 tempDesc.toString() // description
2029 rg.id2hrn.put(as.getSummary(),hrnSummary);
2031 mapToExistingNode.put(typeDesc, hrnSummary);
2033 hrnSummary=mapToExistingNode.get(typeDesc);
2037 // make a new reference between new summary node and source
2038 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2041 fd.getSymbol(), // field name
2043 ExistPredSet.factory(rg.predTrue), // predicates
2047 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2048 prevNode=hrnSummary;
2049 arrayEntryNode=hrnSummary;
2051 // make a new reference between summary nodes of array
2052 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2055 arrayElementFieldName, // field name
2057 ExistPredSet.factory(rg.predTrue), // predicates
2061 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2062 prevNode=hrnSummary;
2067 // create a new obj node if obj has at least one non-primitive field
2068 TypeDescriptor type=fd.getType();
2069 if(getFieldSetTobeAnalyzed(type).size()>0){
2070 TypeDescriptor typeDesc=type.dereference();
2071 typeDesc.setArrayCount(0);
2072 if(!mapToExistingNode.containsKey(typeDesc)){
2073 TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
2074 AllocSite as = createParameterAllocSite(rg, tempDesc, false);
2075 // make a new reference to allocated node
2076 HeapRegionNode hrnSummary =
2077 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2078 false, // single object?
2080 false, // out-of-context?
2082 as, // allocation site
2083 alpha, // inherent reach
2084 alpha, // current reach
2085 ExistPredSet.factory(rg.predTrue), // predicates
2086 tempDesc.toString() // description
2088 rg.id2hrn.put(as.getSummary(),hrnSummary);
2089 mapToExistingNode.put(typeDesc, hrnSummary);
// link innermost array dimension to the element object node
2090 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2093 arrayElementFieldName, // field name
2095 ExistPredSet.factory(rg.predTrue), // predicates
2098 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2099 prevNode=hrnSummary;
2101 HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
// reuse existing element node; add the edge only if absent
2102 if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null){
2103 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2106 arrayElementFieldName, // field name
2108 ExistPredSet.factory(rg.predTrue), // predicates
2111 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2113 prevNode=hrnSummary;
// remember entry->deepest mapping for callers that need the element node
2117 map.put(arrayEntryNode, prevNode);
2118 return arrayEntryNode;
// Builds the initial reachability graph for a task: for each task
// parameter it fabricates an allocation site and heap region node,
// points the parameter variable at it, then worklist-expands the
// object graph field-by-field, creating one summary node per distinct
// tracked type (mapTypeToExistingSummaryNode) and edges for every
// reachable field.  Multi-dimensional array fields are delegated to
// createMultiDeimensionalArrayHRN.
// NOTE(review): some argument lines and branch headers are elided in
// this view; comments describe only the visible structure.
2121 private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
2122 ReachGraph rg = new ReachGraph();
2123 TaskDescriptor taskDesc = fm.getTask();
2125 for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
2126 Descriptor paramDesc = taskDesc.getParameter(idx);
2127 TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);
2129 // setup data structure
// workSet: pending (source node, field) pairs to expand;
// doneSet: "<hrnID>_<field>" strings already expanded, prevents cycles
2130 Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
2131 new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
2132 Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
2133 new Hashtable<TypeDescriptor, HeapRegionNode>();
2134 Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
2135 new Hashtable<HeapRegionNode, HeapRegionNode>();
2136 Set<String> doneSet = new HashSet<String>();
2138 TempDescriptor tempDesc = fm.getParameter(idx);
// fabricate the parameter object itself and bind the param variable to it
2140 AllocSite as = createParameterAllocSite(rg, tempDesc, true);
2141 VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
2142 Integer idNewest = as.getIthOldest(0);
2143 HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);
2145 // make a new reference to allocated node
2146 RefEdge edgeNew = new RefEdge(lnX, // source
2148 taskDesc.getParamType(idx), // type
2150 hrnNewest.getAlpha(), // beta
2151 ExistPredSet.factory(rg.predTrue), // predicates
2154 rg.addRefEdge(lnX, hrnNewest, edgeNew);
2156 // set-up a work set for class field
2157 ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
2158 for (Iterator it = classDesc.getFields(); it.hasNext();) {
2159 FieldDescriptor fd = (FieldDescriptor) it.next();
2160 TypeDescriptor fieldType = fd.getType();
2161 if (shouldAnalysisTrack( fieldType )) {
// each work item is a singleton map: one (node, field) pair
2162 HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
2163 newMap.put(hrnNewest, fd);
2164 workSet.add(newMap);
2168 int uniqueIdentifier = 0;
// worklist loop: expand one (node, field) pair per iteration
2169 while (!workSet.isEmpty()) {
2170 HashMap<HeapRegionNode, FieldDescriptor> map = workSet
2172 workSet.remove(map);
2174 Set<HeapRegionNode> key = map.keySet();
2175 HeapRegionNode srcHRN = key.iterator().next();
2176 FieldDescriptor fd = map.get(srcHRN);
2177 TypeDescriptor type = fd.getType();
2178 String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;
2180 if (!doneSet.contains(doneSetIdentifier)) {
2181 doneSet.add(doneSetIdentifier);
2182 if (!mapTypeToExistingSummaryNode.containsKey(type)) {
2183 // create new summary Node
2184 TempDescriptor td = new TempDescriptor("temp"
2185 + uniqueIdentifier, type);
2187 AllocSite allocSite;
2188 if(type.equals(paramTypeDesc)){
2189 //corresponding allocsite has already been created for a parameter variable.
2192 allocSite = createParameterAllocSite(rg, td, false);
2194 String strDesc = allocSite.toStringForDOT()
2196 TypeDescriptor allocType=allocSite.getType();
2198 HeapRegionNode hrnSummary;
2199 if(allocType.isArray() && allocType.getArrayCount()>0){
// multi-dimensional array field: build the dimension chain
2200 hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
2203 rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
2204 false, // single object?
2206 false, // out-of-context?
2207 allocSite.getType(), // type
2208 allocSite, // allocation site
2209 hrnNewest.getAlpha(), // inherent reach
2210 hrnNewest.getAlpha(), // current reach
2211 ExistPredSet.factory(rg.predTrue), // predicates
2212 strDesc // description
2214 rg.id2hrn.put(allocSite.getSummary(),hrnSummary);
2216 // make a new reference to summary node
2217 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2220 fd.getSymbol(), // field name
2221 hrnNewest.getAlpha(), // beta
2222 ExistPredSet.factory(rg.predTrue), // predicates
2226 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
// one summary node per type: later fields of this type reuse it
2230 mapTypeToExistingSummaryNode.put(type, hrnSummary);
2232 // set-up a work set for fields of the class
2233 Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
2234 for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
2236 FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator
2238 HeapRegionNode newDstHRN;
2239 if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)){
2240 //related heap region node is already exsited.
2241 newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
2243 newDstHRN=hrnSummary;
2245 doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
2246 if(!doneSet.contains(doneSetIdentifier)){
2247 // add new work item
2248 HashMap<HeapRegionNode, FieldDescriptor> newMap =
2249 new HashMap<HeapRegionNode, FieldDescriptor>();
2250 newMap.put(newDstHRN, fieldDescriptor);
2251 workSet.add(newMap);
2256 // if there exists corresponding summary node
2257 HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);
2259 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2261 fd.getType(), // type
2262 fd.getSymbol(), // field name
2263 srcHRN.getAlpha(), // beta
2264 ExistPredSet.factory(rg.predTrue), // predicates
2267 rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
2273 // debugSnapshot(rg, fm, true);
2277 // return all allocation sites in the method (there is one allocation
2278 // site per FlatNew node in a method)
// Memoized: builds the set on first request via buildAllocationSiteSet.
2279 private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
2280 if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
2281 buildAllocationSiteSet(d);
2284 return mapDescriptorToAllocSiteSet.get(d);
// Walks the IR graph of method/task d (breadth via a to-visit set),
// collecting the AllocSite of every FlatNew node encountered, and
// caches the result in mapDescriptorToAllocSiteSet.
2288 private void buildAllocationSiteSet(Descriptor d) {
2289 HashSet<AllocSite> s = new HashSet<AllocSite>();
2292 if( d instanceof MethodDescriptor ) {
2293 fm = state.getMethodFlat( (MethodDescriptor) d);
2295 assert d instanceof TaskDescriptor;
2296 fm = state.getMethodFlat( (TaskDescriptor) d);
2298 pm.analyzeMethod(fm);
2300 // visit every node in this FlatMethod's IR graph
2301 // and make a set of the allocation sites from the
2302 // FlatNew node's visited
2303 HashSet<FlatNode> visited = new HashSet<FlatNode>();
2304 HashSet<FlatNode> toVisit = new HashSet<FlatNode>();
2307 while( !toVisit.isEmpty() ) {
2308 FlatNode n = toVisit.iterator().next();
2310 if( n instanceof FlatNew ) {
2311 s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );
// enqueue unvisited successors (successor edges come from pm)
2317 for( int i = 0; i < pm.numNext(n); ++i ) {
2318 FlatNode child = pm.getNext(n, i);
2319 if( !visited.contains(child) ) {
2325 mapDescriptorToAllocSiteSet.put(d, s);
// Transitively searches dIn and everything it calls (via callGraph)
// for allocation sites that carry a disjoint-analysis ID, returning
// the flagged sites found.
2328 private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {
2330 HashSet<AllocSite> out = new HashSet<AllocSite>();
2331 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2332 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2336 while (!toVisit.isEmpty()) {
2337 Descriptor d = toVisit.iterator().next();
2341 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2342 Iterator asItr = asSet.iterator();
2343 while (asItr.hasNext()) {
2344 AllocSite as = (AllocSite) asItr.next();
// a non-null disjoint ID marks the site as explicitly flagged
2345 if (as.getDisjointAnalysisId() != null) {
2350 // enqueue callees of this method to be searched for
2351 // allocation sites also
2352 Set callees = callGraph.getCalleeSet(d);
2353 if (callees != null) {
2354 Iterator methItr = callees.iterator();
2355 while (methItr.hasNext()) {
2356 MethodDescriptor md = (MethodDescriptor) methItr.next();
2358 if (!visited.contains(md)) {
// Task-flavored variant of getFlaggedAllocationSites: collect the
// allocation sites reachable from task td whose allocated class is
// marked with flags (cd.hasFlags()).
2369 private HashSet<AllocSite>
2370 getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {
2372 HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
2373 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2374 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2378 // traverse this task and all methods reachable from this task
2379 while( !toVisit.isEmpty() ) {
2380 Descriptor d = toVisit.iterator().next();
// keep sites whose allocated type is a class that carries flags
2384 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2385 Iterator asItr = asSet.iterator();
2386 while( asItr.hasNext() ) {
2387 AllocSite as = (AllocSite) asItr.next();
2388 TypeDescriptor typed = as.getType();
// non-class allocations (e.g. primitives/arrays) have no flags to check
2389 if( typed != null ) {
2390 ClassDescriptor cd = typed.getClassDesc();
2391 if( cd != null && cd.hasFlags() ) {
2397 // enqueue callees of this method to be searched for
2398 // allocation sites also
2399 Set callees = callGraph.getCalleeSet(d);
2400 if( callees != null ) {
2401 Iterator methItr = callees.iterator();
2402 while( methItr.hasNext() ) {
2403 MethodDescriptor md = (MethodDescriptor) methItr.next();
2405 if( !visited.contains(md) ) {
// Accessor for the set of method/task descriptors this analysis covers.
// NOTE(review): returns the internal set directly (no defensive copy) --
// callers should treat it as read-only.
2415 public Set<Descriptor> getDescriptorsToAnalyze() {
2416 return descriptorsToAnalyze;
// Accessor for the EffectsAnalysis instance held by this analysis.
2419 public EffectsAnalysis getEffectsAnalysis(){
2420 return effectsAnalysis;
2424 // get successive captures of the analysis state, use compiler
// NOTE(review): the remainder of this comment is not visible in this
// chunk; presumably it described the compiler options that set the
// debug-snapshot fields below (consulted only by debugSnapshot()).
// master switch: when false, debugSnapshot() is a no-op path
2426 boolean takeDebugSnapshots = false;
// if non-null, restrict snapshots to the method/task with this symbol
2427 String descSymbolDebug = null;
// presumably halts compilation once the capture window ends -- TODO confirm
2428 boolean stopAfterCapture = false;
// counters advanced as the analysis visits IR nodes during snapshots
2429 int snapVisitCounter = 0;
2430 int snapNodeCounter = 0;
// capture window: visits in [visitStartCapture,
// visitStartCapture + numVisitsToCapture] are written out
2431 int visitStartCapture = 0;
2432 int numVisitsToCapture = 0;
2435 void debugSnapshot( ReachGraph rg, FlatNode fn, boolean in ) {
2436 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
2444 if( snapVisitCounter >= visitStartCapture ) {
2445 System.out.println( " @@@ snapping visit="+snapVisitCounter+
2446 ", node="+snapNodeCounter+
2450 graphName = String.format( "snap%03d_%04din",
2454 graphName = String.format( "snap%03d_%04dout",
2459 graphName = graphName + fn;
2461 rg.writeGraph( graphName,
2462 true, // write labels (variables)
2463 true, // selectively hide intermediate temp vars
2464 true, // prune unreachable heap regions
2465 false, // hide reachability
2466 true, // hide subset reachability states
2467 true, // hide predicates
2468 false );// hide edge taints