1 package Analysis.Disjoint;
3 import Analysis.CallGraph.*;
4 import Analysis.Liveness;
5 import Analysis.ArrayReferencees;
6 import Analysis.OoOJava.RBlockRelationAnalysis;
7 import Analysis.OoOJava.RBlockStatusAnalysis;
10 import IR.Tree.Modifiers;
15 public class DisjointAnalysis {
17 ///////////////////////////////////////////
19 // Public interface to discover possible
20 // sharing in the program under analysis
22 ///////////////////////////////////////////
24 // if an object allocated at the target site may be
25 // reachable from both an object from root1 and an
26 // object allocated at root2, return TRUE
// Returns TRUE if an object allocated at the target site may be
// reachable BOTH from an object of root site 1 and an object of
// root site 2, per the complete reach graph of fm's method.
// NOTE(review): this numbered listing elides original lines 28-31
// (presumably the FlatNew parameters fnRoot1/fnRoot2/fnTarget and
// the opening brace) and the closing brace -- confirm in full source.
27 public boolean mayBothReachTarget( FlatMethod fm,
// both roots must be flagged sites, or root-reachability is not tracked
32 AllocSite asr1 = getAllocationSiteFromFlatNew( fnRoot1 );
33 AllocSite asr2 = getAllocationSiteFromFlatNew( fnRoot2 );
34 assert asr1.isFlagged();
35 assert asr2.isFlagged();
37 AllocSite ast = getAllocationSiteFromFlatNew( fnTarget );
// query the method's settled reachability graph
38 ReachGraph rg = getPartial( fm.getMethod() );
40 return rg.mayBothReachTarget( asr1, asr2, ast );
43 // similar to the method above, return TRUE if ever
44 // more than one object from the root allocation site
45 // may reach an object from the target site
// Returns TRUE if MORE THAN ONE object from the root allocation
// site may reach an object allocated at the target site.
// NOTE(review): listing elides original lines 47-49 (presumably the
// FlatNew parameters fnRoot/fnTarget and opening brace) and the
// closing brace -- confirm in full source.
46 public boolean mayManyReachTarget( FlatMethod fm,
// the root must be a flagged site for this query to be meaningful
50 AllocSite asr = getAllocationSiteFromFlatNew( fnRoot );
51 assert asr.isFlagged();
53 AllocSite ast = getAllocationSiteFromFlatNew( fnTarget );
// query the method's settled reachability graph
54 ReachGraph rg = getPartial( fm.getMethod() );
56 return rg.mayManyReachTarget( asr, ast );
// Public accessor: the flagged allocation sites reachable from the
// given task; guarded so it cannot be called mid-analysis.
// NOTE(review): closing brace elided from this listing.
62 public HashSet<AllocSite>
63 getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
64 checkAnalysisComplete();
65 return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
// Public accessor: the analysis abstraction (AllocSite) for a given
// FlatNew statement; guarded against use during analysis.
// NOTE(review): closing brace elided from this listing.
68 public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
69 checkAnalysisComplete();
70 return getAllocSiteFromFlatNewPRIVATE(fn);
// Public accessor: map an intergraph heap region node ID back to the
// allocation site that created it (may be null if unknown to the map).
// NOTE(review): closing brace elided from this listing.
73 public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
74 checkAnalysisComplete();
75 return mapHrnIdToAllocSite.get(id);
// Sharing query between two task/method parameters, by index.
// NOTE(review): listing elides original lines 79-80 (presumably the
// int paramIndex1/paramIndex2 parameters and opening brace) and the
// closing brace -- confirm in full source.
78 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
81 checkAnalysisComplete();
// look up the settled graph and flat code for the task or method
82 ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
83 FlatMethod fm=state.getMethodFlat(taskOrMethod);
85 return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
// Sharing query between a parameter (by index) and an allocation site.
// NOTE(review): closing brace elided from this listing.
88 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
89 int paramIndex, AllocSite alloc) {
90 checkAnalysisComplete();
91 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
92 FlatMethod fm=state.getMethodFlat(taskOrMethod);
94 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Convenience overload with arguments reversed; the query is
// symmetric, so it delegates with the same (paramIndex, alloc)
// order as the overload above.
// NOTE(review): closing brace elided from this listing.
97 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
98 AllocSite alloc, int paramIndex) {
99 checkAnalysisComplete();
100 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
101 FlatMethod fm=state.getMethodFlat(taskOrMethod);
103 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Sharing query between two allocation sites in the context of the
// given task or method.
// NOTE(review): closing brace elided from this listing.
106 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
107 AllocSite alloc1, AllocSite alloc2) {
108 checkAnalysisComplete();
109 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
111 return rg.mayReachSharedObjects(alloc1, alloc2);
// Render a set of heap region nodes as a human-readable, multi-line
// string; nodes with a known allocation site get a verbose site dump.
// NOTE(review): listing elides original lines 116-118 (presumably the
// String accumulator declaration), the if/else around lines 124-126
// (null-allocation-site branch), and the trailing return/brace --
// confirm in full source.
114 public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
115 checkAnalysisComplete();
119 Iterator<HeapRegionNode> i = s.iterator();
120 while (i.hasNext()) {
121 HeapRegionNode n = i.next();
123 AllocSite as = n.getAllocSite();
// branch without a site: terse form
125 out += "  " + n.toString() + ",\n";
// branch with a site: verbose form
127 out += "  " + n.toString() + ": " + as.toStringVerbose()
136 // use the methods given above to check every possible sharing class
137 // between task parameters and flagged allocation sites reachable
// Report every potential sharing class for every task: parameter vs.
// parameter, parameter vs. reachable flagged allocation site, and
// site vs. site.  Output goes to outputFile; tabularOutput selects a
// LaTeX-table row format instead of prose.
// NOTE(review): the listing elides several original lines throughout
// (remaining parameters at 140-144, closing braces, the numSharing
// accounting, and bw.close()) -- confirm in full source.
139 public void writeAllSharing(String outputFile,
142 boolean tabularOutput,
145 throws java.io.IOException {
146 checkAnalysisComplete();
148 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
150 if (!tabularOutput) {
151 bw.write("Conducting ownership analysis with allocation depth = "
152 + allocationDepth + "\n");
153 bw.write(timeReport + "\n");
158 // look through every task for potential sharing
159 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
160 while (taskItr.hasNext()) {
161 TaskDescriptor td = (TaskDescriptor) taskItr.next();
163 if (!tabularOutput) {
164 bw.write("\n---------" + td + "--------\n");
167 HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);
169 Set<HeapRegionNode> common;
171 // for each task parameter, check for sharing classes with
172 // other task parameters and every allocation site
173 // reachable from this task
174 boolean foundSomeSharing = false;
176 FlatMethod fm = state.getMethodFlat(td);
177 for (int i = 0; i < fm.numParameters(); ++i) {
179 // skip parameters with types that cannot reference
181 if( !shouldAnalysisTrack( fm.getParameter( i ).getType() ) ) {
185 // for the ith parameter check for sharing classes to all
186 // higher numbered parameters
187 for (int j = i + 1; j < fm.numParameters(); ++j) {
189 // skip parameters with types that cannot reference
191 if( !shouldAnalysisTrack( fm.getParameter( j ).getType() ) ) {
196 common = hasPotentialSharing(td, i, j);
197 if (!common.isEmpty()) {
198 foundSomeSharing = true;
200 if (!tabularOutput) {
201 bw.write("Potential sharing between parameters " + i
202 + " and " + j + ".\n");
203 bw.write(prettyPrintNodeSet(common) + "\n");
208 // for the ith parameter, check for sharing classes against
209 // the set of allocation sites reachable from this
211 Iterator allocItr = allocSites.iterator();
212 while (allocItr.hasNext()) {
213 AllocSite as = (AllocSite) allocItr.next();
214 common = hasPotentialSharing(td, i, as);
215 if (!common.isEmpty()) {
216 foundSomeSharing = true;
218 if (!tabularOutput) {
219 bw.write("Potential sharing between parameter " + i
220 + " and " + as.getFlatNew() + ".\n");
221 bw.write(prettyPrintNodeSet(common) + "\n");
227 // for each allocation site check for sharing classes with
228 // other allocation sites in the context of execution
230 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
231 Iterator allocItr1 = allocSites.iterator();
232 while (allocItr1.hasNext()) {
233 AllocSite as1 = (AllocSite) allocItr1.next();
235 Iterator allocItr2 = allocSites.iterator();
236 while (allocItr2.hasNext()) {
237 AllocSite as2 = (AllocSite) allocItr2.next();
// outerChecked guarantees each unordered pair is queried only once
239 if (!outerChecked.contains(as2)) {
240 common = hasPotentialSharing(td, as1, as2);
242 if (!common.isEmpty()) {
243 foundSomeSharing = true;
245 if (!tabularOutput) {
246 bw.write("Potential sharing between "
247 + as1.getFlatNew() + " and "
248 + as2.getFlatNew() + ".\n");
249 bw.write(prettyPrintNodeSet(common) + "\n");
255 outerChecked.add(as1);
258 if (!foundSomeSharing) {
259 if (!tabularOutput) {
260 bw.write("No sharing between flagged objects in Task " + td
// tabular mode emits a LaTeX table row instead of prose
268 bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
269 + " & " + numMethodsAnalyzed() + " \\\\\n");
271 bw.write("\nNumber sharing classes: "+numSharing);
279 // this version of writeAllSharing is for Java programs that have no tasks
280 // ***********************************
281 // WARNING: THIS DOES NOT DO THE RIGHT THING, REPORTS 0 ALWAYS!
282 // It should use mayBothReachTarget and mayManyReachTarget like
283 // OoOJava does to query analysis results
284 // ***********************************
// Task-free (plain Java) variant of writeAllSharing: only checks
// site-vs-site sharing in the context of main.  Per the WARNING in
// the header comment above the method in the original source, this
// reports 0 always and should be replaced by mayBothReachTarget /
// mayManyReachTarget style queries.
// NOTE(review): the listing elides several original lines (remaining
// parameters at 286-290, the as1/as2 arguments to hasPotentialSharing
// at 323-324, closing braces, and bw.close()) -- confirm in source.
285 public void writeAllSharingJava(String outputFile,
288 boolean tabularOutput,
291 throws java.io.IOException {
292 checkAnalysisComplete();
298 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
300 bw.write("Conducting disjoint reachability analysis with allocation depth = "
301 + allocationDepth + "\n");
302 bw.write(timeReport + "\n\n");
304 boolean foundSomeSharing = false;
// context is the source program's main method
306 Descriptor d = typeUtil.getMain();
307 HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);
309 // for each allocation site check for sharing classes with
310 // other allocation sites in the context of execution
312 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
313 Iterator allocItr1 = allocSites.iterator();
314 while (allocItr1.hasNext()) {
315 AllocSite as1 = (AllocSite) allocItr1.next();
317 Iterator allocItr2 = allocSites.iterator();
318 while (allocItr2.hasNext()) {
319 AllocSite as2 = (AllocSite) allocItr2.next();
// outerChecked guarantees each unordered pair is queried only once
321 if (!outerChecked.contains(as2)) {
322 Set<HeapRegionNode> common = hasPotentialSharing(d,
325 if (!common.isEmpty()) {
326 foundSomeSharing = true;
327 bw.write("Potential sharing between "
328 + as1.getDisjointAnalysisId() + " and "
329 + as2.getDisjointAnalysisId() + ".\n");
330 bw.write(prettyPrintNodeSet(common) + "\n");
336 outerChecked.add(as1);
339 if (!foundSomeSharing) {
340 bw.write("No sharing classes between flagged objects found.\n");
342 bw.write("\nNumber sharing classes: "+numSharing);
345 bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
350 ///////////////////////////////////////////
352 // end public interface
354 ///////////////////////////////////////////
// Guard used by every public interface method: fail fast if results
// are requested while the fixed-point analysis is still running.
// NOTE(review): closing braces elided from this listing.
358 protected void checkAnalysisComplete() {
359 if( !analysisComplete ) {
360 throw new Error("Warning: public interface method called while analysis is running.");
// ---- analysis state and bookkeeping fields ----
// NOTE(review): several declarator-name lines are elided from this
// numbered listing (e.g. descriptorsToVisitQ after line 439,
// calleesToEnqueue after line 457, mapHrnIdToAllocSite after line
// 487, mapTypeToArrayField after line 513) -- confirm in full source.
369 // run in faster mode, only when bugs wrung out!
370 public static boolean releaseMode;
372 // use command line option to set this, analysis
373 // should attempt to be deterministic
374 public static boolean determinismDesired;
376 // when we want to enforce determinism in the
377 // analysis we need to sort descriptors rather
378 // than toss them in efficient sets, use this
379 public static DescriptorComparator dComp =
380 new DescriptorComparator();
383 // data from the compiler
385 public CallGraph callGraph;
386 public Liveness liveness;
387 public ArrayReferencees arrayReferencees;
388 public RBlockRelationAnalysis rblockRel;
389 public RBlockStatusAnalysis rblockStatus;
390 public TypeUtil typeUtil;
391 public int allocationDepth;
// effects analysis is only enabled when an RBlock relation is supplied
393 protected boolean doEffectsAnalysis = false;
394 protected EffectsAnalysis effectsAnalysis;
396 // data structure for public interface
397 private Hashtable< Descriptor, HashSet<AllocSite> >
398 mapDescriptorToAllocSiteSet;
401 // for public interface methods to warn that they
402 // are grabbing results during analysis
403 private boolean analysisComplete;
406 // used to identify HeapRegionNode objects
407 // A unique ID equates an object in one
408 // ownership graph with an object in another
409 // graph that logically represents the same
411 // start at 10 and increment to reserve some
412 // IDs for special purposes
413 static protected int uniqueIDcount = 10;
416 // An out-of-scope method created by the
417 // analysis that has no parameters, and
418 // appears to allocate the command line
419 // arguments, then invoke the source code's
420 // main method. The purpose of this is to
421 // provide the analysis with an explicit
422 // top-level context with no parameters
423 protected MethodDescriptor mdAnalysisEntry;
424 protected FlatMethod fmAnalysisEntry;
426 // main method defined by source program
427 protected MethodDescriptor mdSourceEntry;
429 // the set of task and/or method descriptors
430 // reachable in call graph
431 protected Set<Descriptor>
432 descriptorsToAnalyze;
434 // current descriptors to visit in fixed-point
435 // interprocedural analysis, prioritized by
436 // dependency in the call graph
437 protected Stack<Descriptor>
438 descriptorsToVisitStack;
439 protected PriorityQueue<DescriptorQWrapper>
442 // a duplication of the above structure, but
443 // for efficient testing of inclusion
444 protected HashSet<Descriptor>
445 descriptorsToVisitSet;
447 // storage for priorities (it doesn't make sense
448 // to add them to the Descriptor class; keep them in
450 protected Hashtable<Descriptor, Integer>
451 mapDescriptorToPriority;
453 // when analyzing a method and scheduling more:
454 // remember set of callee's enqueued for analysis
455 // so they can be put on top of the callers in
456 // the stack-visit mode
457 protected Set<Descriptor>
460 // maps a descriptor to its current partial result
461 // from the intraprocedural fixed-point analysis--
462 // then the interprocedural analysis settles, this
463 // mapping will have the final results for each
465 protected Hashtable<Descriptor, ReachGraph>
466 mapDescriptorToCompleteReachGraph;
468 // maps a descriptor to its known dependents: namely
469 // methods or tasks that call the descriptor's method
470 // AND are part of this analysis (reachable from main)
471 protected Hashtable< Descriptor, Set<Descriptor> >
472 mapDescriptorToSetDependents;
474 // if the analysis client wants to flag allocation sites
475 // programmatically, it should provide a set of FlatNew
476 // statements--this may be null if unneeded
477 protected Set<FlatNew> sitesToFlag;
479 // maps each flat new to one analysis abstraction
480 // allocate site object, these exist outside reach graphs
481 protected Hashtable<FlatNew, AllocSite>
482 mapFlatNewToAllocSite;
484 // maps intergraph heap region IDs to intergraph
485 // allocation sites that created them, a redundant
486 // structure for efficiency in some operations
487 protected Hashtable<Integer, AllocSite>
490 // maps a method to its initial heap model (IHM) that
491 // is the set of reachability graphs from every caller
492 // site, all merged together. The reason that we keep
493 // them separate is that any one call site's contribution
494 // to the IHM may change along the path to the fixed point
495 protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
496 mapDescriptorToIHMcontributions;
498 // additionally, keep a mapping from descriptors to the
499 // merged in-coming initial context, because we want this
500 // initial context to be STRICTLY MONOTONIC
501 protected Hashtable<Descriptor, ReachGraph>
502 mapDescriptorToInitialContext;
504 // make the result for back edges analysis-wide STRICTLY
505 // MONOTONIC as well, but notice we use FlatNode as the
506 // key for this map: in case we want to consider other
507 // nodes as back edges in future implementations
508 protected Hashtable<FlatNode, ReachGraph>
509 mapBackEdgeToMonotone;
// synthetic field name used to model array element references
512 public static final String arrayElementFieldName = "___element_";
513 static protected Hashtable<TypeDescriptor, FieldDescriptor>
516 // for controlling DOT file output
517 protected boolean writeFinalDOTs;
518 protected boolean writeAllIncrementalDOTs;
520 // supporting DOT output--when we want to write every
521 // partial method result, keep a tally for generating
523 protected Hashtable<Descriptor, Integer>
524 mapDescriptorToNumUpdates;
526 //map task descriptor to initial task parameter
527 protected Hashtable<Descriptor, ReachGraph>
528 mapDescriptorToReachGraph;
530 protected PointerMethod pm;
532 //Keeps track of all the reach graphs at every program point
533 //DO NOT USE UNLESS YOU REALLY NEED IT
534 static protected Hashtable<FlatNode, ReachGraph> fn2rgAtEnter =
535 new Hashtable<FlatNode, ReachGraph>();
// maps a call site back to the descriptor whose code contains it
537 private Hashtable<FlatCall, Descriptor> fc2enclosing;
540 // allocate various structures that are not local
541 // to a single class method--should be done once
// Allocate the analysis-wide maps/sets; called once from init()
// before the fixed-point computation starts.
// NOTE(review): the listing elides closing braces, the else branches'
// braces, and the LHS of the assignment at original line 598 (which
// presumably assigns calleesToEnqueue) -- confirm in full source.
542 protected void allocateStructures() {
544 if( determinismDesired ) {
545 // use an ordered set
546 descriptorsToAnalyze = new TreeSet<Descriptor>( dComp );
548 // otherwise use a speedy hashset
549 descriptorsToAnalyze = new HashSet<Descriptor>();
552 mapDescriptorToCompleteReachGraph =
553 new Hashtable<Descriptor, ReachGraph>();
555 mapDescriptorToNumUpdates =
556 new Hashtable<Descriptor, Integer>();
558 mapDescriptorToSetDependents =
559 new Hashtable< Descriptor, Set<Descriptor> >();
561 mapFlatNewToAllocSite =
562 new Hashtable<FlatNew, AllocSite>();
564 mapDescriptorToIHMcontributions =
565 new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();
567 mapDescriptorToInitialContext =
568 new Hashtable<Descriptor, ReachGraph>();
570 mapBackEdgeToMonotone =
571 new Hashtable<FlatNode, ReachGraph>();
573 mapHrnIdToAllocSite =
574 new Hashtable<Integer, AllocSite>();
576 mapTypeToArrayField =
577 new Hashtable <TypeDescriptor, FieldDescriptor>();
// work-list containers depend on the chosen visiting mode
579 if( state.DISJOINTDVISITSTACK ||
580 state.DISJOINTDVISITSTACKEESONTOP
582 descriptorsToVisitStack =
583 new Stack<Descriptor>();
586 if( state.DISJOINTDVISITPQUE ) {
587 descriptorsToVisitQ =
588 new PriorityQueue<DescriptorQWrapper>();
591 descriptorsToVisitSet =
592 new HashSet<Descriptor>();
594 mapDescriptorToPriority =
595 new Hashtable<Descriptor, Integer>();
// LHS elided in listing; presumably calleesToEnqueue =
598 new HashSet<Descriptor>();
600 mapDescriptorToAllocSiteSet =
601 new Hashtable<Descriptor, HashSet<AllocSite> >();
603 mapDescriptorToReachGraph =
604 new Hashtable<Descriptor, ReachGraph>();
606 pm = new PointerMethod();
608 fc2enclosing = new Hashtable<FlatCall, Descriptor>();
613 // this analysis generates a disjoint reachability
614 // graph for every reachable method in the program
// Convenience constructor: runs with output NOT suppressed
// (delegates to init with suppressOutput = false).
// NOTE(review): listing elides original lines 616-619 (presumably
// TypeUtil tu, CallGraph cg, Liveness l, ArrayReferencees ar) and
// the opening/closing braces -- confirm in full source.
615 public DisjointAnalysis( State s,
620 Set<FlatNew> sitesToFlag,
621 RBlockRelationAnalysis rra,
622 RBlockStatusAnalysis rsa
624 init( s, tu, cg, l, ar, sitesToFlag, rra, rsa, false );
// Full constructor: caller chooses whether to suppress output.
// NOTE(review): listing elides original lines 628-631 (presumably
// TypeUtil tu, CallGraph cg, Liveness l, ArrayReferencees ar) and
// the opening/closing braces -- confirm in full source.
627 public DisjointAnalysis( State s,
632 Set<FlatNew> sitesToFlag,
633 RBlockRelationAnalysis rra,
634 RBlockStatusAnalysis rsa,
635 boolean suppressOutput
637 init( s, tu, cg, l, ar, sitesToFlag, rra, rsa, suppressOutput );
// Shared constructor body: stores compiler inputs, reads State flags,
// configures ReachGraph statics, runs the interprocedural fixed
// point, and writes any requested reports/DOT files.
// NOTE(review): the listing elides many original lines (remaining
// parameters 641-643, try blocks around the analysis and report
// writing, closing braces) -- confirm in full source.
// NOTE(review): releaseMode and determinismDesired are STATIC fields
// assigned through `this.` below -- legal but misleading; candidate
// cleanup in the full source.
640 protected void init( State state,
644 ArrayReferencees arrayReferencees,
645 Set<FlatNew> sitesToFlag,
646 RBlockRelationAnalysis rra,
647 RBlockStatusAnalysis rsa,
648 boolean suppressOutput
651 analysisComplete = false;
654 this.typeUtil = typeUtil;
655 this.callGraph = callGraph;
656 this.liveness = liveness;
657 this.arrayReferencees = arrayReferencees;
658 this.sitesToFlag = sitesToFlag;
659 this.rblockRel = rra;
660 this.rblockStatus = rsa;
// effects analysis rides along only when OoOJava supplies rblock info
662 if( rblockRel != null ) {
663 doEffectsAnalysis = true;
664 effectsAnalysis = new EffectsAnalysis();
667 this.allocationDepth = state.DISJOINTALLOCDEPTH;
668 this.releaseMode = state.DISJOINTRELEASEMODE;
669 this.determinismDesired = state.DISJOINTDETERMINISM;
671 this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL && !suppressOutput;
672 this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL && !suppressOutput;
// debug-snapshot configuration
674 this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
675 this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
676 this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
677 this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
678 this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
679 this.snapVisitCounter = 1; // count visits from 1 (user will write 1, means 1st visit)
680 this.snapNodeCounter = 0; // count nodes from 0
// exactly one descriptor-visiting mode must be selected
683 state.DISJOINTDVISITSTACK ||
684 state.DISJOINTDVISITPQUE ||
685 state.DISJOINTDVISITSTACKEESONTOP;
686 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
687 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
688 assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);
690 // set some static configuration for ReachGraphs
691 ReachGraph.allocationDepth = allocationDepth;
692 ReachGraph.typeUtil = typeUtil;
694 ReachGraph.debugCallSiteVisitStartCapture
695 = state.DISJOINTDEBUGCALLVISITTOSTART;
697 ReachGraph.debugCallSiteNumVisitsToCapture
698 = state.DISJOINTDEBUGCALLNUMVISITS;
700 ReachGraph.debugCallSiteStopAfter
701 = state.DISJOINTDEBUGCALLSTOPAFTER;
703 ReachGraph.debugCallSiteVisitCounter
704 = 0; // count visits from 1, is incremented before first visit
708 allocateStructures();
710 double timeStartAnalysis = (double) System.nanoTime();
712 // start interprocedural fixed-point computation
715 } catch( IOException e ) {
716 throw new Error( "IO Exception while writing disjointness analysis output." );
719 analysisComplete=true;
722 double timeEndAnalysis = (double) System.nanoTime();
723 double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow( 10.0, 9.0 ) );
726 if( sitesToFlag != null ) {
727 treport = String.format( "Disjoint reachability analysis flagged %d sites and took %.3f sec.", sitesToFlag.size(), dt );
728 if(sitesToFlag.size()>0){
729 treport+="\nFlagged sites:"+"\n"+sitesToFlag.toString();
732 treport = String.format( "Disjoint reachability analysis took %.3f sec.", dt );
734 String justtime = String.format( "%.2f", dt );
735 System.out.println( treport );
// optional outputs, all gated on State flags and suppressOutput
739 if( writeFinalDOTs && !writeAllIncrementalDOTs ) {
743 if( state.DISJOINTWRITEIHMS && !suppressOutput ) {
747 if( state.DISJOINTWRITEINITCONTEXTS && !suppressOutput ) {
748 writeInitialContexts();
751 if( state.DISJOINTALIASFILE != null && !suppressOutput ) {
// task mode uses the full report; Java mode uses the limited variant
753 writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
755 writeAllSharingJava(state.DISJOINTALIASFILE,
758 state.DISJOINTALIASTAB,
763 } catch( IOException e ) {
764 throw new Error( "IO Exception while writing disjointness analysis output." );
// True while the active work-list (stack or priority queue, per the
// configured visiting mode) still has descriptors to process.
// NOTE(review): closing braces elided from this listing.
770 protected boolean moreDescriptorsToVisit() {
771 if( state.DISJOINTDVISITSTACK ||
772 state.DISJOINTDVISITSTACKEESONTOP
774 return !descriptorsToVisitStack.isEmpty();
776 } else if( state.DISJOINTDVISITPQUE ) {
777 return !descriptorsToVisitQ.isEmpty();
// no mode selected is a configuration error
780 throw new Error( "Neither descriptor visiting mode set" );
784 // fixed-point computation over the call graph--when a
785 // method's callees are updated, it must be reanalyzed
// Interprocedural fixed point over the call graph: seed the set of
// reachable tasks/methods (Bamboo task mode vs. plain Java mode),
// populate the work-list per the visiting mode, then reanalyze
// descriptors until no complete reach graph changes.
// NOTE(review): the listing elides many original lines (mode
// conditionals, else branches, enqueue calls at 917/934, closing
// braces) -- confirm in full source.
786 protected void analyzeMethods() throws java.io.IOException {
788 // task or non-task (java) mode determines what the roots
789 // of the call chain are, and establishes the set of methods
790 // reachable from the roots that will be analyzed
793 System.out.println( "Bamboo mode..." );
795 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
796 while( taskItr.hasNext() ) {
797 TaskDescriptor td = (TaskDescriptor) taskItr.next();
798 if( !descriptorsToAnalyze.contains( td ) ) {
799 // add all methods transitively reachable from the
801 descriptorsToAnalyze.add( td );
802 descriptorsToAnalyze.addAll( callGraph.getAllMethods( td ) );
807 System.out.println( "Java mode..." );
809 // add all methods transitively reachable from the
810 // source's main to set for analysis
811 mdSourceEntry = typeUtil.getMain();
812 descriptorsToAnalyze.add( mdSourceEntry );
813 descriptorsToAnalyze.addAll( callGraph.getAllMethods( mdSourceEntry ) );
815 // fabricate an empty calling context that will call
816 // the source's main, but call graph doesn't know
817 // about it, so explicitly add it
818 makeAnalysisEntryMethod( mdSourceEntry );
819 descriptorsToAnalyze.add( mdAnalysisEntry );
823 // now, depending on the interprocedural mode for visiting
824 // methods, set up the needed data structures
826 if( state.DISJOINTDVISITPQUE ) {
828 // topologically sort according to the call graph so
829 // leaf calls are last, helps build contexts up first
830 LinkedList<Descriptor> sortedDescriptors =
831 topologicalSort( descriptorsToAnalyze );
833 // add sorted descriptors to priority queue, and duplicate
834 // the queue as a set for efficiently testing whether some
835 // method is marked for analysis
837 Iterator<Descriptor> dItr;
839 // for the priority queue, give items at the head
840 // of the sorted list a low number (highest priority)
841 while( !sortedDescriptors.isEmpty() ) {
842 Descriptor d = sortedDescriptors.removeFirst();
843 mapDescriptorToPriority.put( d, new Integer( p ) );
844 descriptorsToVisitQ.add( new DescriptorQWrapper( p, d ) );
845 descriptorsToVisitSet.add( d );
849 } else if( state.DISJOINTDVISITSTACK ||
850 state.DISJOINTDVISITSTACKEESONTOP
852 // if we're doing the stack scheme, just throw the root
853 // method or tasks on the stack
855 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
856 while( taskItr.hasNext() ) {
857 TaskDescriptor td = (TaskDescriptor) taskItr.next();
858 descriptorsToVisitStack.add( td );
859 descriptorsToVisitSet.add( td );
// Java mode: only the fabricated entry method is a root
863 descriptorsToVisitStack.add( mdAnalysisEntry );
864 descriptorsToVisitSet.add( mdAnalysisEntry );
868 throw new Error( "Unknown method scheduling mode" );
872 // analyze scheduled methods until there are no more to visit
873 while( moreDescriptorsToVisit() ) {
876 if( state.DISJOINTDVISITSTACK ||
877 state.DISJOINTDVISITSTACKEESONTOP
879 d = descriptorsToVisitStack.pop();
881 } else if( state.DISJOINTDVISITPQUE ) {
882 d = descriptorsToVisitQ.poll().getDescriptor();
// the "to visit" set shadows whichever work-list is in use
885 assert descriptorsToVisitSet.contains( d );
886 descriptorsToVisitSet.remove( d );
888 // because the task or method descriptor just extracted
889 // was in the "to visit" set it either hasn't been analyzed
890 // yet, or some method that it depends on has been
891 // updated. Recompute a complete reachability graph for
892 // this task/method and compare it to any previous result.
893 // If there is a change detected, add any methods/tasks
894 // that depend on this one to the "to visit" set.
896 System.out.println( "Analyzing " + d );
898 if( state.DISJOINTDVISITSTACKEESONTOP ) {
899 assert calleesToEnqueue.isEmpty();
902 ReachGraph rg = analyzeMethod( d );
903 ReachGraph rgPrev = getPartial( d );
905 if( !rg.equals( rgPrev ) ) {
908 if( state.DISJOINTDEBUGSCHEDULING ) {
909 System.out.println( "  complete graph changed, scheduling callers for analysis:" );
912 // results for d changed, so enqueue dependents
913 // of d for further analysis
914 Iterator<Descriptor> depsItr = getDependents( d ).iterator();
915 while( depsItr.hasNext() ) {
916 Descriptor dNext = depsItr.next();
919 if( state.DISJOINTDEBUGSCHEDULING ) {
920 System.out.println( "    "+dNext );
925 // whether or not the method under analysis changed,
926 // we may have some callees that are scheduled for
927 // more analysis, and they should go on the top of
928 // the stack now (in other method-visiting modes they
929 // are already enqueued at this point
930 if( state.DISJOINTDVISITSTACKEESONTOP ) {
931 Iterator<Descriptor> depsItr = calleesToEnqueue.iterator();
932 while( depsItr.hasNext() ) {
933 Descriptor dNext = depsItr.next();
936 calleesToEnqueue.clear();
// Intraprocedural fixed point for one task/method: iterate the flat
// nodes, merging predecessor graphs and applying analyzeFlatNode's
// transfer functions until no node's graph changes, then merge all
// return-node graphs into the method's complete result.
// NOTE(review): the listing elides many original lines (FlatMethod fm
// and FlatNode fn declarations, else branches, closing braces, and
// the debug-snapshot bookkeeping around 1074-1084) -- confirm in
// full source.
942 protected ReachGraph analyzeMethod( Descriptor d )
943 throws java.io.IOException {
945 // get the flat code for this descriptor
947 if( d == mdAnalysisEntry ) {
948 fm = fmAnalysisEntry;
950 fm = state.getMethodFlat( d );
952 pm.analyzeMethod( fm );
954 // intraprocedural work set
955 Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
956 flatNodesToVisit.add( fm );
958 // if determinism is desired by client, shadow the
959 // set with a queue to make visit order deterministic
960 Queue<FlatNode> flatNodesToVisitQ = null;
961 if( determinismDesired ) {
962 flatNodesToVisitQ = new LinkedList<FlatNode>();
963 flatNodesToVisitQ.add( fm );
966 // mapping of current partial results
967 Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph =
968 new Hashtable<FlatNode, ReachGraph>();
970 // the set of return nodes partial results that will be combined as
971 // the final, conservative approximation of the entire method
972 HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();
974 while( !flatNodesToVisit.isEmpty() ) {
// queue drives deterministic order; the set stays authoritative
977 if( determinismDesired ) {
978 assert !flatNodesToVisitQ.isEmpty();
979 fn = flatNodesToVisitQ.remove();
981 fn = flatNodesToVisit.iterator().next();
983 flatNodesToVisit.remove( fn );
985 // effect transfer function defined by this node,
986 // then compare it to the old graph at this node
987 // to see if anything was updated.
989 ReachGraph rg = new ReachGraph();
990 TaskDescriptor taskDesc;
// a task's entry FlatMethod gets a cached initial task graph
991 if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null){
992 if(mapDescriptorToReachGraph.containsKey(taskDesc)){
993 // retrieve existing reach graph if it is not first time
994 rg=mapDescriptorToReachGraph.get(taskDesc);
996 // create initial reach graph for a task
997 rg=createInitialTaskReachGraph((FlatMethod)fn);
999 mapDescriptorToReachGraph.put(taskDesc, rg);
1003 // start by merging all node's parents' graphs
1004 for( int i = 0; i < pm.numPrev(fn); ++i ) {
1005 FlatNode pn = pm.getPrev(fn,i);
1006 if( mapFlatNodeToReachGraph.containsKey( pn ) ) {
1007 ReachGraph rgParent = mapFlatNodeToReachGraph.get( pn );
1008 rg.merge( rgParent );
// optional snapshot BEFORE the transfer function
1013 if( takeDebugSnapshots &&
1014 d.getSymbol().equals( descSymbolDebug )
1016 debugSnapshot( rg, fn, true );
1020 // modify rg with appropriate transfer function
1021 rg = analyzeFlatNode( d, fm, fn, setReturns, rg );
// optional snapshot AFTER the transfer function
1024 if( takeDebugSnapshots &&
1025 d.getSymbol().equals( descSymbolDebug )
1027 debugSnapshot( rg, fn, false );
1032 // if the results of the new graph are different from
1033 // the current graph at this node, replace the graph
1034 // with the update and enqueue the children
1035 ReachGraph rgPrev = mapFlatNodeToReachGraph.get( fn );
1036 if( !rg.equals( rgPrev ) ) {
1037 mapFlatNodeToReachGraph.put( fn, rg );
1039 for( int i = 0; i < pm.numNext( fn ); i++ ) {
1040 FlatNode nn = pm.getNext( fn, i );
1042 flatNodesToVisit.add( nn );
1043 if( determinismDesired ) {
1044 flatNodesToVisitQ.add( nn );
1051 // end by merging all return nodes into a complete
1052 // reach graph that represents all possible heap
1053 // states after the flat method returns
1054 ReachGraph completeGraph = new ReachGraph();
1056 assert !setReturns.isEmpty();
1057 Iterator retItr = setReturns.iterator();
1058 while( retItr.hasNext() ) {
1059 FlatReturnNode frn = (FlatReturnNode) retItr.next();
1061 assert mapFlatNodeToReachGraph.containsKey( frn );
1062 ReachGraph rgRet = mapFlatNodeToReachGraph.get( frn );
1064 completeGraph.merge( rgRet );
1068 if( takeDebugSnapshots &&
1069 d.getSymbol().equals( descSymbolDebug )
1071 // increment that we've visited the debug snap
1072 // method, and reset the node counter
1073 System.out.println( "    @@@ debug snap at visit "+snapVisitCounter );
1075 snapNodeCounter = 0;
1077 if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&
1080 System.out.println( "!!! Stopping analysis after debug snap captures. !!!" );
1086 return completeGraph;
1090 protected ReachGraph
1091 analyzeFlatNode( Descriptor d,
1092 FlatMethod fmContaining,
1094 HashSet<FlatReturnNode> setRetNodes,
1096 ) throws java.io.IOException {
1099 // any variables that are no longer live should be
1100 // nullified in the graph to reduce edges
1101 //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );
1105 FieldDescriptor fld;
1106 TypeDescriptor tdElement;
1107 FieldDescriptor fdElement;
1108 FlatSESEEnterNode sese;
1109 FlatSESEExitNode fsexn;
1111 //Stores the flatnode's reach graph at enter
1112 fn2rgAtEnter.put(fn, rg);
1114 // use node type to decide what transfer function
1115 // to apply to the reachability graph
1116 switch( fn.kind() ) {
1118 case FKind.FlatGenReachNode: {
1119 FlatGenReachNode fgrn = (FlatGenReachNode) fn;
1121 System.out.println( "Generating a reach graph!" );
1122 rg.writeGraph( "genReach"+fgrn.getGraphName(),
1123 true, // write labels (variables)
1124 true, // selectively hide intermediate temp vars
1125 true, // prune unreachable heap regions
1126 false, // hide reachability altogether
1127 true, // hide subset reachability states
1128 true, // hide predicates
1129 true ); // hide edge taints
1133 case FKind.FlatMethod: {
1134 // construct this method's initial heap model (IHM)
1135 // since we're working on the FlatMethod, we know
1136 // the incoming ReachGraph 'rg' is empty
1138 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1139 getIHMcontributions( d );
1141 Set entrySet = heapsFromCallers.entrySet();
1142 Iterator itr = entrySet.iterator();
1143 while( itr.hasNext() ) {
1144 Map.Entry me = (Map.Entry) itr.next();
1145 FlatCall fc = (FlatCall) me.getKey();
1146 ReachGraph rgContrib = (ReachGraph) me.getValue();
1148 assert fc.getMethod().equals( d );
1150 rg.merge( rgContrib );
1153 // additionally, we are enforcing STRICT MONOTONICITY for the
1154 // method's initial context, so grow the context by whatever
1155 // the previously computed context was, and put the most
1156 // up-to-date context back in the map
1157 ReachGraph rgPrevContext = mapDescriptorToInitialContext.get( d );
1158 rg.merge( rgPrevContext );
1159 mapDescriptorToInitialContext.put( d, rg );
1163 case FKind.FlatOpNode:
1164 FlatOpNode fon = (FlatOpNode) fn;
1165 if( fon.getOp().getOp() == Operation.ASSIGN ) {
1166 lhs = fon.getDest();
1167 rhs = fon.getLeft();
1169 // before transfer, do effects analysis support
1170 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1171 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1172 // x gets status of y
1173 if(!rg.isAccessible(rhs)){
1174 rg.makeInaccessible(lhs);
1180 rg.assignTempXEqualToTempY( lhs, rhs );
1184 case FKind.FlatCastNode:
1185 FlatCastNode fcn = (FlatCastNode) fn;
1189 TypeDescriptor td = fcn.getType();
1192 // before transfer, do effects analysis support
1193 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1194 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1195 // x gets status of y
1196 if(!rg.isAccessible(rhs)){
1197 rg.makeInaccessible(lhs);
1203 rg.assignTempXEqualToCastedTempY( lhs, rhs, td );
1206 case FKind.FlatFieldNode:
1207 FlatFieldNode ffn = (FlatFieldNode) fn;
1211 fld = ffn.getField();
1213 // before graph transform, possible inject
1214 // a stall-site taint
1215 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1217 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1218 // x=y.f, stall y if not accessible
1219 // contributes read effects on stall site of y
1220 if(!rg.isAccessible(rhs)) {
1221 rg.taintStallSite(fn, rhs);
1224 // after this, x and y are accessbile.
1225 rg.makeAccessible(lhs);
1226 rg.makeAccessible(rhs);
1230 if( shouldAnalysisTrack( fld.getType() ) ) {
1232 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fld );
1235 // after transfer, use updated graph to
1236 // do effects analysis
1237 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1238 effectsAnalysis.analyzeFlatFieldNode( rg, rhs, fld );
1242 case FKind.FlatSetFieldNode:
1243 FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;
1245 lhs = fsfn.getDst();
1246 fld = fsfn.getField();
1247 rhs = fsfn.getSrc();
1249 boolean strongUpdate = false;
1251 // before transfer func, possibly inject
1252 // stall-site taints
1253 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1255 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1256 // x.y=f , stall x and y if they are not accessible
1257 // also contribute write effects on stall site of x
1258 if(!rg.isAccessible(lhs)) {
1259 rg.taintStallSite(fn, lhs);
1262 if(!rg.isAccessible(rhs)) {
1263 rg.taintStallSite(fn, rhs);
1266 // accessible status update
1267 rg.makeAccessible(lhs);
1268 rg.makeAccessible(rhs);
1272 if( shouldAnalysisTrack( fld.getType() ) ) {
1274 strongUpdate = rg.assignTempXFieldFEqualToTempY( lhs, fld, rhs );
1277 // use transformed graph to do effects analysis
1278 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1279 effectsAnalysis.analyzeFlatSetFieldNode( rg, lhs, fld, strongUpdate );
1283 case FKind.FlatElementNode:
1284 FlatElementNode fen = (FlatElementNode) fn;
1289 assert rhs.getType() != null;
1290 assert rhs.getType().isArray();
1292 tdElement = rhs.getType().dereference();
1293 fdElement = getArrayField( tdElement );
1295 // before transfer func, possibly inject
1297 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1298 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1299 // x=y.f, stall y if not accessible
1300 // contributes read effects on stall site of y
1301 // after this, x and y are accessbile.
1302 if(!rg.isAccessible(rhs)) {
1303 rg.taintStallSite(fn, rhs);
1306 rg.makeAccessible(lhs);
1307 rg.makeAccessible(rhs);
1311 if( shouldAnalysisTrack( lhs.getType() ) ) {
1313 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fdElement );
1316 // use transformed graph to do effects analysis
1317 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1318 effectsAnalysis.analyzeFlatFieldNode( rg, rhs, fdElement );
1322 case FKind.FlatSetElementNode:
1323 FlatSetElementNode fsen = (FlatSetElementNode) fn;
1325 lhs = fsen.getDst();
1326 rhs = fsen.getSrc();
1328 assert lhs.getType() != null;
1329 assert lhs.getType().isArray();
1331 tdElement = lhs.getType().dereference();
1332 fdElement = getArrayField( tdElement );
1334 // before transfer func, possibly inject
1335 // stall-site taints
1336 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1338 if(rblockStatus.isInCriticalRegion(fmContaining, fn)){
1339 // x.y=f , stall x and y if they are not accessible
1340 // also contribute write effects on stall site of x
1341 if(!rg.isAccessible(lhs)) {
1342 rg.taintStallSite(fn, lhs);
1345 if(!rg.isAccessible(rhs)) {
1346 rg.taintStallSite(fn, rhs);
1349 // accessible status update
1350 rg.makeAccessible(lhs);
1351 rg.makeAccessible(rhs);
1355 if( shouldAnalysisTrack( rhs.getType() ) ) {
1356 // transfer func, BUT
1357 // skip this node if it cannot create new reachability paths
1358 if( !arrayReferencees.doesNotCreateNewReaching( fsen ) ) {
1359 rg.assignTempXFieldFEqualToTempY( lhs, fdElement, rhs );
1363 // use transformed graph to do effects analysis
1364 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1365 effectsAnalysis.analyzeFlatSetFieldNode( rg, lhs, fdElement,
1371 FlatNew fnn = (FlatNew) fn;
1373 if( shouldAnalysisTrack( lhs.getType() ) ) {
1374 AllocSite as = getAllocSiteFromFlatNewPRIVATE( fnn );
1376 // before transform, support effects analysis
1377 if (doEffectsAnalysis && fmContaining != fmAnalysisEntry) {
1378 if (rblockStatus.isInCriticalRegion(fmContaining, fn)) {
1379 // after creating new object, lhs is accessible
1380 rg.makeAccessible(lhs);
1385 rg.assignTempEqualToNewAlloc( lhs, as );
1389 case FKind.FlatSESEEnterNode:
1390 sese = (FlatSESEEnterNode) fn;
1392 if( sese.getIsCallerSESEplaceholder() ) {
1393 // ignore these dummy rblocks!
1397 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1399 // always remove ALL stall site taints at enter
1400 rg.removeAllStallSiteTaints();
1402 // inject taints for in-set vars
1403 rg.taintInSetVars( sese );
1408 case FKind.FlatSESEExitNode:
1409 fsexn = (FlatSESEExitNode) fn;
1410 sese = fsexn.getFlatEnter();
1412 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1414 // @ sese exit make all live variables
1415 // inaccessible to later parent statements
1416 rg.makeInaccessible( liveness.getLiveInTemps( fmContaining, fn ) );
1418 // always remove ALL stall site taints at exit
1419 rg.removeAllStallSiteTaints();
1421 // remove in-set var taints for the exiting rblock
1422 rg.removeInContextTaints( sese );
1427 case FKind.FlatCall: {
1428 Descriptor mdCaller;
1429 if( fmContaining.getMethod() != null ){
1430 mdCaller = fmContaining.getMethod();
1432 mdCaller = fmContaining.getTask();
1434 FlatCall fc = (FlatCall) fn;
1435 MethodDescriptor mdCallee = fc.getMethod();
1436 FlatMethod fmCallee = state.getMethodFlat( mdCallee );
1439 if( mdCallee.getSymbol().equals( "genReach" ) ) {
1440 rg.writeGraph( "genReach"+d,
1441 true, // write labels (variables)
1442 true, // selectively hide intermediate temp vars
1443 true, // prune unreachable heap regions
1444 false, // hide reachability altogether
1445 true, // hide subset reachability states
1446 true, // hide predicates
1447 true ); // hide edge taints
1453 boolean debugCallSite =
1454 mdCaller.getSymbol().equals( state.DISJOINTDEBUGCALLER ) &&
1455 mdCallee.getSymbol().equals( state.DISJOINTDEBUGCALLEE );
1457 boolean writeDebugDOTs = false;
1458 boolean stopAfter = false;
1459 if( debugCallSite ) {
1460 ++ReachGraph.debugCallSiteVisitCounter;
1461 System.out.println( " $$$ Debug call site visit "+
1462 ReachGraph.debugCallSiteVisitCounter+
1466 (ReachGraph.debugCallSiteVisitCounter >=
1467 ReachGraph.debugCallSiteVisitStartCapture) &&
1469 (ReachGraph.debugCallSiteVisitCounter <
1470 ReachGraph.debugCallSiteVisitStartCapture +
1471 ReachGraph.debugCallSiteNumVisitsToCapture)
1473 writeDebugDOTs = true;
1474 System.out.println( " $$$ Capturing this call site visit $$$" );
1475 if( ReachGraph.debugCallSiteStopAfter &&
1476 (ReachGraph.debugCallSiteVisitCounter ==
1477 ReachGraph.debugCallSiteVisitStartCapture +
1478 ReachGraph.debugCallSiteNumVisitsToCapture - 1)
1486 // calculate the heap this call site can reach--note this is
1487 // not used for the current call site transform, we are
1488 // grabbing this heap model for future analysis of the callees,
1489 // so if different results emerge we will return to this site
1490 ReachGraph heapForThisCall_old =
1491 getIHMcontribution( mdCallee, fc );
1493 // the computation of the callee-reachable heap
1494 // is useful for making the callee starting point
1495 // and for applying the call site transfer function
1496 Set<Integer> callerNodeIDsCopiedToCallee =
1497 new HashSet<Integer>();
1499 ReachGraph heapForThisCall_cur =
1500 rg.makeCalleeView( fc,
1502 callerNodeIDsCopiedToCallee,
1506 // enforce that a call site contribution can only
1507 // monotonically increase
1508 heapForThisCall_cur.merge( heapForThisCall_old );
1510 if( !heapForThisCall_cur.equals( heapForThisCall_old ) ) {
1511 // if heap at call site changed, update the contribution,
1512 // and reschedule the callee for analysis
1513 addIHMcontribution( mdCallee, fc, heapForThisCall_cur );
1515 // map a FlatCall to its enclosing method/task descriptor
1516 // so we can write that info out later
1517 fc2enclosing.put( fc, mdCaller );
1519 if( state.DISJOINTDEBUGSCHEDULING ) {
1520 System.out.println( " context changed, scheduling callee: "+mdCallee );
1523 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1524 calleesToEnqueue.add( mdCallee );
1526 enqueue( mdCallee );
1531 // the transformation for a call site should update the
1532 // current heap abstraction with any effects from the callee,
1533 // or if the method is virtual, the effects from any possible
1534 // callees, so find the set of callees...
1535 Set<MethodDescriptor> setPossibleCallees;
1536 if( determinismDesired ) {
1537 // use an ordered set
1538 setPossibleCallees = new TreeSet<MethodDescriptor>( dComp );
1540 // otherwise use a speedy hashset
1541 setPossibleCallees = new HashSet<MethodDescriptor>();
1544 if( mdCallee.isStatic() ) {
1545 setPossibleCallees.add( mdCallee );
1547 TypeDescriptor typeDesc = fc.getThis().getType();
1548 setPossibleCallees.addAll( callGraph.getMethods( mdCallee,
1553 ReachGraph rgMergeOfPossibleCallers = new ReachGraph();
1555 Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
1556 while( mdItr.hasNext() ) {
1557 MethodDescriptor mdPossible = mdItr.next();
1558 FlatMethod fmPossible = state.getMethodFlat( mdPossible );
1560 addDependent( mdPossible, // callee
1563 // don't alter the working graph (rg) until we compute a
1564 // result for every possible callee, merge them all together,
1565 // then set rg to that
1566 ReachGraph rgPossibleCaller = new ReachGraph();
1567 rgPossibleCaller.merge( rg );
1569 ReachGraph rgPossibleCallee = getPartial( mdPossible );
1571 if( rgPossibleCallee == null ) {
1572 // if this method has never been analyzed just schedule it
1573 // for analysis and skip over this call site for now
1574 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1575 calleesToEnqueue.add( mdPossible );
1577 enqueue( mdPossible );
1580 if( state.DISJOINTDEBUGSCHEDULING ) {
1581 System.out.println( " callee hasn't been analyzed, scheduling: "+mdPossible );
1586 // calculate the method call transform
1587 rgPossibleCaller.resolveMethodCall( fc,
1590 callerNodeIDsCopiedToCallee,
1594 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1595 if( !rgPossibleCallee.isAccessible( ReachGraph.tdReturn ) ) {
1596 rgPossibleCaller.makeInaccessible( fc.getReturnTemp() );
1602 rgMergeOfPossibleCallers.merge( rgPossibleCaller );
1607 System.out.println( "$$$ Exiting after requested captures of call site. $$$" );
1612 // now that we've taken care of building heap models for
1613 // callee analysis, finish this transformation
1614 rg = rgMergeOfPossibleCallers;
1617 // jjenista: what is this? It breaks compilation
1618 // of programs with no tasks/SESEs/rblocks...
1619 //XXXXXXXXXXXXXXXXXXXXXXXXX
1620 //need to consider more
1621 FlatNode nextFN=fmCallee.getNext(0);
1622 if( nextFN instanceof FlatSESEEnterNode ) {
1623 FlatSESEEnterNode calleeSESE=(FlatSESEEnterNode)nextFN;
1624 if(!calleeSESE.getIsLeafSESE()){
1625 rg.makeInaccessible( liveness.getLiveInTemps( fmContaining, fn ) );
1632 case FKind.FlatReturnNode:
1633 FlatReturnNode frn = (FlatReturnNode) fn;
1634 rhs = frn.getReturnTemp();
1636 // before transfer, do effects analysis support
1637 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1638 if(!rg.isAccessible(rhs)){
1639 rg.makeInaccessible(ReachGraph.tdReturn);
1643 if( rhs != null && shouldAnalysisTrack( rhs.getType() ) ) {
1644 rg.assignReturnEqualToTemp( rhs );
1647 setRetNodes.add( frn );
1653 // dead variables were removed before the above transfer function
1654 // was applied, so eliminate heap regions and edges that are no
1655 // longer part of the abstractly-live heap graph, and sweep up
1656 // and reachability effects that are altered by the reduction
1657 //rg.abstractGarbageCollect();
1661 // back edges are strictly monotonic
1662 if( pm.isBackEdge( fn ) ) {
1663 ReachGraph rgPrevResult = mapBackEdgeToMonotone.get( fn );
1664 rg.merge( rgPrevResult );
1665 mapBackEdgeToMonotone.put( fn, rg );
1668 // at this point rg should be the correct update
1669 // by an above transfer function, or untouched if
1670 // the flat node type doesn't affect the heap
1676 // this method should generate integers strictly greater than zero!
1677 // special "shadow" regions are made from a heap region by negating
// the ID, so IDs must stay strictly positive for negation to map
// one-to-one onto a disjoint "shadow" ID space.
// NOTE(review): the increment of the static counter uniqueIDcount is
// not visible in this excerpt -- confirm it is bumped before the value
// is returned so successive calls yield distinct, positive IDs.
1679 static public Integer generateUniqueHeapRegionNodeID() {
1681 return new Integer( uniqueIDcount );
// Returns the single pseudo-field descriptor used to model the element
// slot of arrays whose element type is tdElement; created lazily and
// cached in mapTypeToArrayField so all arrays of one element type
// share the same descriptor instance.
1686 static public FieldDescriptor getArrayField( TypeDescriptor tdElement ) {
1687 FieldDescriptor fdElement = mapTypeToArrayField.get( tdElement );
1688 if( fdElement == null ) {
// first request for this element type: fabricate a public field named
// arrayElementFieldName to stand in for "the contents" of such arrays
1689 fdElement = new FieldDescriptor( new Modifiers( Modifiers.PUBLIC ),
1691 arrayElementFieldName,
1694 mapTypeToArrayField.put( tdElement, fdElement );
// Dump a DOT file of the final (complete) reachability graph of every
// analyzed method/task, named "COMPLETE<desc>" ("COMPLETEtask<desc>"
// for tasks so the two namespaces cannot collide).
1701 private void writeFinalGraphs() {
1702 Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
1703 Iterator itr = entrySet.iterator();
1704 while( itr.hasNext() ) {
1705 Map.Entry me = (Map.Entry) itr.next();
1706 Descriptor d = (Descriptor) me.getKey();
1707 ReachGraph rg = (ReachGraph) me.getValue();
// choose a file-name prefix by descriptor kind
1710 if( d instanceof TaskDescriptor ) {
1711 graphName = "COMPLETEtask"+d;
1713 graphName = "COMPLETE"+d;
1716 rg.writeGraph( graphName,
1717 true, // write labels (variables)
1718 true, // selectively hide intermediate temp vars
1719 true, // prune unreachable heap regions
1720 false, // hide reachability altogether
1721 true, // hide subset reachability states
1722 true, // hide predicates
1723 false ); // hide edge taints
// Dump one DOT file per call-site contribution to every method's
// initial heap model (IHM): for each callee d, each calling FlatCall's
// contributed ReachGraph is written as "IHMPARTFOR<d>FROM<caller><fc>".
1727 private void writeFinalIHMs() {
1728 Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
1729 while( d2IHMsItr.hasNext() ) {
1730 Map.Entry me1 = (Map.Entry) d2IHMsItr.next();
1731 Descriptor d = (Descriptor) me1.getKey();
1732 Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>) me1.getValue();
1734 Iterator fc2rgItr = IHMs.entrySet().iterator();
1735 while( fc2rgItr.hasNext() ) {
1736 Map.Entry me2 = (Map.Entry) fc2rgItr.next();
1737 FlatCall fc = (FlatCall) me2.getKey();
1738 ReachGraph rg = (ReachGraph) me2.getValue();
// fc2enclosing maps the call site back to its enclosing method/task,
// which is folded into the output file name
1740 rg.writeGraph( "IHMPARTFOR"+d+"FROM"+fc2enclosing.get( fc )+fc,
1741 true, // write labels (variables)
1742 true, // selectively hide intermediate temp vars
// NOTE(review): the next two argument comments are swapped relative to
// every other writeGraph call site in this file, where the 3rd arg is
// labeled "prune unreachable heap regions" and the 4th is "hide
// reachability".  Confirm the parameter order against
// ReachGraph.writeGraph -- either the comments or the flag values here
// may be reversed.
1743 true, // hide reachability altogether
1744 true, // prune unreachable heap regions
1745 true, // hide subset reachability states
1746 false, // hide predicates
1747 true ); // hide edge taints
// Dump a DOT file of each method/task's initial analysis context (the
// monotonically-grown merge of caller contributions kept in
// mapDescriptorToInitialContext), named "INITIAL<desc>".
1752 private void writeInitialContexts() {
1753 Set entrySet = mapDescriptorToInitialContext.entrySet();
1754 Iterator itr = entrySet.iterator();
1755 while( itr.hasNext() ) {
1756 Map.Entry me = (Map.Entry) itr.next();
1757 Descriptor d = (Descriptor) me.getKey();
1758 ReachGraph rg = (ReachGraph) me.getValue();
1760 rg.writeGraph( "INITIAL"+d,
1761 true, // write labels (variables)
1762 true, // selectively hide intermediate temp vars
1763 true, // prune unreachable heap regions
1764 false, // hide all reachability
1765 true, // hide subset reachability states
1766 true, // hide predicates
1767 false );// hide edge taints
1772 protected ReachGraph getPartial( Descriptor d ) {
1773 return mapDescriptorToCompleteReachGraph.get( d );
// Record the newly computed complete reachability result for d.  When
// writeAllIncrementalDOTs is enabled, also dump this intermediate
// result to a DOT file whose name embeds a per-descriptor update count.
1776 protected void setPartial( Descriptor d, ReachGraph rg ) {
1777 mapDescriptorToCompleteReachGraph.put( d, rg );
1779 // when the flag for writing out every partial
1780 // result is set, we should spit out the graph,
1781 // but in order to give it a unique name we need
1782 // to track how many partial results for this
1783 // descriptor we've already written out
1784 if( writeAllIncrementalDOTs ) {
1785 if( !mapDescriptorToNumUpdates.containsKey( d ) ) {
1786 mapDescriptorToNumUpdates.put( d, new Integer( 0 ) );
1788 Integer n = mapDescriptorToNumUpdates.get( d );
// zero-padded counter keeps the emitted files sorted by update order
1791 if( d instanceof TaskDescriptor ) {
1792 graphName = d+"COMPLETEtask"+String.format( "%05d", n );
1794 graphName = d+"COMPLETE"+String.format( "%05d", n );
1797 rg.writeGraph( graphName,
1798 true, // write labels (variables)
1799 true, // selectively hide intermediate temp vars
1800 true, // prune unreachable heap regions
1801 false, // hide all reachability
1802 true, // hide subset reachability states
1803 false, // hide predicates
1804 false); // hide edge taints
1806 mapDescriptorToNumUpdates.put( d, n + 1 );
1812 // return just the allocation site associated with one FlatNew node,
// creating and caching it on first request.  Each new site
// pre-allocates allocationDepth single-object node IDs plus one
// summary-node ID, registering the single-object IDs in
// mapHrnIdToAllocSite for reverse lookup.
1813 protected AllocSite getAllocSiteFromFlatNewPRIVATE( FlatNew fnew ) {
// sitesToFlag lets client passes force this site to be flagged even if
// the source program did not flag it
1815 boolean flagProgrammatically = false;
1816 if( sitesToFlag != null && sitesToFlag.contains( fnew ) ) {
1817 flagProgrammatically = true;
1820 if( !mapFlatNewToAllocSite.containsKey( fnew ) ) {
1821 AllocSite as = AllocSite.factory( allocationDepth,
1823 fnew.getDisjointId(),
1824 flagProgrammatically
1827 // the newest nodes are single objects
1828 for( int i = 0; i < allocationDepth; ++i ) {
1829 Integer id = generateUniqueHeapRegionNodeID();
1830 as.setIthOldest( i, id );
1831 mapHrnIdToAllocSite.put( id, as );
1834 // the oldest node is a summary node
1835 as.setSummary( generateUniqueHeapRegionNodeID() );
1837 mapFlatNewToAllocSite.put( fnew, as );
1840 return mapFlatNewToAllocSite.get( fnew );
1844 public static boolean shouldAnalysisTrack( TypeDescriptor type ) {
1845 // don't track primitive types, but an array
1846 // of primitives is heap memory
1847 if( type.isImmutable() ) {
1848 return type.isArray();
1851 // everything else is an object
// Number of method/task descriptors in this analysis' scope.
1855 protected int numMethodsAnalyzed() {
1856 return descriptorsToAnalyze.size();
1863 // Take in source entry which is the program's compiled entry and
1864 // create a new analysis entry, a method that takes no parameters
1865 // and appears to allocate the command line arguments and call the
1866 // source entry with them. The purpose of this analysis entry is
1867 // to provide a top-level method context with no parameters left.
// Side effects: sets this.mdAnalysisEntry and this.fmAnalysisEntry.
1868 protected void makeAnalysisEntryMethod( MethodDescriptor mdSourceEntry ) {
// fabricated entry is "public static void analysisEntryMethod()"
1870 Modifiers mods = new Modifiers();
1871 mods.addModifier( Modifiers.PUBLIC );
1872 mods.addModifier( Modifiers.STATIC );
1874 TypeDescriptor returnType =
1875 new TypeDescriptor( TypeDescriptor.VOID );
1877 this.mdAnalysisEntry =
1878 new MethodDescriptor( mods,
1880 "analysisEntryMethod"
// a temp standing in for the command-line args array (same type as the
// source entry's first parameter)...
1883 TempDescriptor cmdLineArgs =
1884 new TempDescriptor( "args",
1885 mdSourceEntry.getParamType( 0 )
// ...and a synthetic allocation of that array type to define it
1889 new FlatNew( mdSourceEntry.getParamType( 0 ),
1894 TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
1895 sourceEntryArgs[0] = cmdLineArgs;
// invoke the real program entry with the fabricated args
1898 new FlatCall( mdSourceEntry,
1904 FlatReturnNode frn = new FlatReturnNode( null );
1906 FlatExit fe = new FlatExit();
// assemble the flat method and link its first flat node
1908 this.fmAnalysisEntry =
1909 new FlatMethod( mdAnalysisEntry,
1913 this.fmAnalysisEntry.addNext( fn );
1920 protected LinkedList<Descriptor> topologicalSort( Set<Descriptor> toSort ) {
1922 Set<Descriptor> discovered;
1924 if( determinismDesired ) {
1925 // use an ordered set
1926 discovered = new TreeSet<Descriptor>( dComp );
1928 // otherwise use a speedy hashset
1929 discovered = new HashSet<Descriptor>();
1932 LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();
1934 Iterator<Descriptor> itr = toSort.iterator();
1935 while( itr.hasNext() ) {
1936 Descriptor d = itr.next();
1938 if( !discovered.contains( d ) ) {
1939 dfsVisit( d, toSort, sorted, discovered );
1946 // While we're doing DFS on call graph, remember
1947 // dependencies for efficient queuing of methods
1948 // during interprocedural analysis:
1950 // a dependent of a method descriptor d for this analysis is:
1951 // 1) a method or task that invokes d
1952 // 2) in the descriptorsToAnalyze set
// Post-order DFS: visit all in-scope callers of d first, then append d,
// so the resulting list is callee-before-caller.
1953 protected void dfsVisit( Descriptor d,
1954 Set <Descriptor> toSort,
1955 LinkedList<Descriptor> sorted,
1956 Set <Descriptor> discovered ) {
1957 discovered.add( d );
1959 // only methods have callers, tasks never do
1960 if( d instanceof MethodDescriptor ) {
1962 MethodDescriptor md = (MethodDescriptor) d;
1964 // the call graph is not aware that we have a fabricated
1965 // analysis entry that calls the program source's entry
1966 if( md == mdSourceEntry ) {
1967 if( !discovered.contains( mdAnalysisEntry ) ) {
// record the synthetic edge analysisEntry -> sourceEntry explicitly
1968 addDependent( mdSourceEntry, // callee
1969 mdAnalysisEntry // caller
1971 dfsVisit( mdAnalysisEntry, toSort, sorted, discovered );
1975 // otherwise call graph guides DFS
1976 Iterator itr = callGraph.getCallerSet( md ).iterator();
1977 while( itr.hasNext() ) {
1978 Descriptor dCaller = (Descriptor) itr.next();
1980 // only consider callers in the original set to analyze
1981 if( !toSort.contains( dCaller ) ) {
1985 if( !discovered.contains( dCaller ) ) {
1986 addDependent( md, // callee
1990 dfsVisit( dCaller, toSort, sorted, discovered );
1995 // for leaf-nodes last now!
1996 sorted.addLast( d );
2000 protected void enqueue( Descriptor d ) {
2002 if( !descriptorsToVisitSet.contains( d ) ) {
2004 if( state.DISJOINTDVISITSTACK ||
2005 state.DISJOINTDVISITSTACKEESONTOP
2007 descriptorsToVisitStack.add( d );
2009 } else if( state.DISJOINTDVISITPQUE ) {
2010 Integer priority = mapDescriptorToPriority.get( d );
2011 descriptorsToVisitQ.add( new DescriptorQWrapper( priority,
2016 descriptorsToVisitSet.add( d );
2021 // a dependent of a method decriptor d for this analysis is:
2022 // 1) a method or task that invokes d
2023 // 2) in the descriptorsToAnalyze set
2024 protected void addDependent( Descriptor callee, Descriptor caller ) {
2025 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
2026 if( deps == null ) {
2027 deps = new HashSet<Descriptor>();
2030 mapDescriptorToSetDependents.put( callee, deps );
2033 protected Set<Descriptor> getDependents( Descriptor callee ) {
2034 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
2035 if( deps == null ) {
2036 deps = new HashSet<Descriptor>();
2037 mapDescriptorToSetDependents.put( callee, deps );
2043 public Hashtable<FlatCall, ReachGraph> getIHMcontributions( Descriptor d ) {
2045 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2046 mapDescriptorToIHMcontributions.get( d );
2048 if( heapsFromCallers == null ) {
2049 heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
2050 mapDescriptorToIHMcontributions.put( d, heapsFromCallers );
2053 return heapsFromCallers;
// Fetch the single heap contribution that call site fc makes to callee
// d's initial heap model.
2056 public ReachGraph getIHMcontribution( Descriptor d,
2059 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2060 getIHMcontributions( d );
2062 if( !heapsFromCallers.containsKey( fc ) ) {
// NOTE(review): the body of this missing-entry branch is not visible in
// this excerpt -- presumably it returns null or installs a default
// contribution; confirm against the full source before relying on the
// behavior for unseen call sites.
2066 return heapsFromCallers.get( fc );
2070 public void addIHMcontribution( Descriptor d,
2074 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2075 getIHMcontributions( d );
2077 heapsFromCallers.put( fc, rg );
// Fabricate a synthetic FlatNew/AllocSite pair modeling the heap object
// a parameter (tempDesc) points to at method entry.  The third
// parameter (elided in this excerpt) appears to select whether the site
// gets a "param<temp>" disjoint-site ID or none -- callers pass
// true/false; confirm against the full source.
2081 private AllocSite createParameterAllocSite( ReachGraph rg,
2082 TempDescriptor tempDesc,
2088 flatNew = new FlatNew( tempDesc.getType(), // type
2089 tempDesc, // param temp
2090 false, // global alloc?
2091 "param"+tempDesc // disjoint site ID string
2094 flatNew = new FlatNew( tempDesc.getType(), // type
2095 tempDesc, // param temp
2096 false, // global alloc?
2097 null // disjoint site ID string
2101 // create allocation site
2102 AllocSite as = AllocSite.factory( allocationDepth,
2104 flatNew.getDisjointId(),
// the newest allocationDepth nodes are single objects; register each
// fresh ID so heap region nodes map back to this site
2107 for (int i = 0; i < allocationDepth; ++i) {
2108 Integer id = generateUniqueHeapRegionNodeID();
2109 as.setIthOldest(i, id);
2110 mapHrnIdToAllocSite.put(id, as);
2112 // the oldest node is a summary node
2113 as.setSummary( generateUniqueHeapRegionNodeID() );
2121 private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc){
2123 Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
2124 if(!typeDesc.isImmutable()){
2125 ClassDescriptor classDesc = typeDesc.getClassDesc();
2126 for (Iterator it = classDesc.getFields(); it.hasNext();) {
2127 FieldDescriptor field = (FieldDescriptor) it.next();
2128 TypeDescriptor fieldType = field.getType();
2129 if (shouldAnalysisTrack( fieldType )) {
2130 fieldSet.add(field);
// Model an N-dimensional array field fd of srcHRN as a chain of summary
// heap region nodes, one per dimension (outermost first).  A summary
// node is reused per element type via mapToExistingNode; when the
// element class has at least one tracked field, one more node for the
// element objects is appended.  The mapping from the entry (outermost)
// node to the innermost node is recorded in "map", and the entry node
// is returned so the caller can hang further edges off it.
// NOTE(review): "Deimensional" in the name is a typo; renaming requires
// updating the call site in createInitialTaskReachGraph as well.
2138 private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha ){
2140 int dimCount=fd.getType().getArrayCount();
2141 HeapRegionNode prevNode=null;
2142 HeapRegionNode arrayEntryNode=null;
// walk dimensions from outermost (i==dimCount) down to 1
2143 for(int i=dimCount;i>0;i--){
2144 TypeDescriptor typeDesc=fd.getType().dereference();//hack to get instance of type desc
2145 typeDesc.setArrayCount(i);
2146 TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
2147 HeapRegionNode hrnSummary ;
2148 if(!mapToExistingNode.containsKey(typeDesc)){
// no summary node for this array type yet: fabricate an allocation
// site and a fresh summary heap region node for it
2153 as = createParameterAllocSite(rg, tempDesc, false);
2155 // make a new reference to allocated node
2157 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2158 false, // single object?
2160 false, // out-of-context?
2161 as.getType(), // type
2162 as, // allocation site
2163 alpha, // inherent reach
2164 alpha, // current reach
2165 ExistPredSet.factory(rg.predTrue), // predicates
2166 tempDesc.toString() // description
2168 rg.id2hrn.put(as.getSummary(),hrnSummary);
2170 mapToExistingNode.put(typeDesc, hrnSummary);
// reuse the node already built for this array type
2172 hrnSummary=mapToExistingNode.get(typeDesc);
// outermost dimension hangs off the source node through field fd;
// inner dimensions chain through the array-element pseudo-field
2176 // make a new reference between new summary node and source
2177 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2180 fd.getSymbol(), // field name
2182 ExistPredSet.factory(rg.predTrue), // predicates
2186 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2187 prevNode=hrnSummary;
2188 arrayEntryNode=hrnSummary;
2190 // make a new reference between summary nodes of array
2191 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2194 arrayElementFieldName, // field name
2196 ExistPredSet.factory(rg.predTrue), // predicates
2200 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2201 prevNode=hrnSummary;
2206 // create a new obj node if obj has at least one non-primitive field
2207 TypeDescriptor type=fd.getType();
2208 if(getFieldSetTobeAnalyzed(type).size()>0){
2209 TypeDescriptor typeDesc=type.dereference();
2210 typeDesc.setArrayCount(0);
2211 if(!mapToExistingNode.containsKey(typeDesc)){
2212 TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
2213 AllocSite as = createParameterAllocSite(rg, tempDesc, false);
2214 // make a new reference to allocated node
2215 HeapRegionNode hrnSummary =
2216 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2217 false, // single object?
2219 false, // out-of-context?
2221 as, // allocation site
2222 alpha, // inherent reach
2223 alpha, // current reach
2224 ExistPredSet.factory(rg.predTrue), // predicates
2225 tempDesc.toString() // description
2227 rg.id2hrn.put(as.getSummary(),hrnSummary);
2228 mapToExistingNode.put(typeDesc, hrnSummary);
2229 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2232 arrayElementFieldName, // field name
2234 ExistPredSet.factory(rg.predTrue), // predicates
2237 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2238 prevNode=hrnSummary;
// element-object node already exists: only add the edge if the
// innermost dimension does not already reference it
2240 HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
2241 if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null){
2242 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2245 arrayElementFieldName, // field name
2247 ExistPredSet.factory(rg.predTrue), // predicates
2250 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2252 prevNode=hrnSummary;
// remember where a walk entering at the array lands after all dims
2256 map.put(arrayEntryNode, prevNode);
2257 return arrayEntryNode;
// Build the initial reachability graph for a task: each task parameter
// gets a flagged parameter allocation site and a variable edge, then a
// worklist walks all field types reachable from the parameter's class,
// fabricating one shared summary node per distinct field type and
// wiring field edges between them (multi-dimensional arrays are
// delegated to createMultiDeimensionalArrayHRN).  Each worklist item is
// a single-entry HashMap used as a (source node, field) pair.
2260 private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
2261 ReachGraph rg = new ReachGraph();
2262 TaskDescriptor taskDesc = fm.getTask();
2264 for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
2265 Descriptor paramDesc = taskDesc.getParameter(idx);
2266 TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);
2268 // setup data structure
2269 Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
2270 new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
2271 Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
2272 new Hashtable<TypeDescriptor, HeapRegionNode>();
2273 Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
2274 new Hashtable<HeapRegionNode, HeapRegionNode>();
2275 Set<String> doneSet = new HashSet<String>();
2277 TempDescriptor tempDesc = fm.getParameter(idx);
// flagged alloc site models the parameter's object at task entry
2279 AllocSite as = createParameterAllocSite(rg, tempDesc, true);
2280 VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
2281 Integer idNewest = as.getIthOldest(0);
2282 HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);
2284 // make a new reference to allocated node
2285 RefEdge edgeNew = new RefEdge(lnX, // source
2287 taskDesc.getParamType(idx), // type
2289 hrnNewest.getAlpha(), // beta
2290 ExistPredSet.factory(rg.predTrue), // predicates
2293 rg.addRefEdge(lnX, hrnNewest, edgeNew);
2295 // set-up a work set for class field
2296 ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
2297 for (Iterator it = classDesc.getFields(); it.hasNext();) {
2298 FieldDescriptor fd = (FieldDescriptor) it.next();
2299 TypeDescriptor fieldType = fd.getType();
2300 if (shouldAnalysisTrack( fieldType )) {
2301 HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
2302 newMap.put(hrnNewest, fd);
2303 workSet.add(newMap);
2307 int uniqueIdentifier = 0;
2308 while (!workSet.isEmpty()) {
// pop one (srcHRN, fd) work item
2309 HashMap<HeapRegionNode, FieldDescriptor> map = workSet
2311 workSet.remove(map);
2313 Set<HeapRegionNode> key = map.keySet();
2314 HeapRegionNode srcHRN = key.iterator().next();
2315 FieldDescriptor fd = map.get(srcHRN);
2316 TypeDescriptor type = fd.getType();
// a (node, field) pair is processed at most once
2317 String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;
2319 if (!doneSet.contains(doneSetIdentifier)) {
2320 doneSet.add(doneSetIdentifier);
2321 if (!mapTypeToExistingSummaryNode.containsKey(type)) {
2322 // create new summary Node
2323 TempDescriptor td = new TempDescriptor("temp"
2324 + uniqueIdentifier, type);
2326 AllocSite allocSite;
2327 if(type.equals(paramTypeDesc)){
2328 //corresponding allocsite has already been created for a parameter variable.
2331 allocSite = createParameterAllocSite(rg, td, false);
2333 String strDesc = allocSite.toStringForDOT()
2335 TypeDescriptor allocType=allocSite.getType();
2337 HeapRegionNode hrnSummary;
// arrays are modeled by a chain of per-dimension nodes
2338 if(allocType.isArray() && allocType.getArrayCount()>0){
2339 hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
2342 rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
2343 false, // single object?
2345 false, // out-of-context?
2346 allocSite.getType(), // type
2347 allocSite, // allocation site
2348 hrnNewest.getAlpha(), // inherent reach
2349 hrnNewest.getAlpha(), // current reach
2350 ExistPredSet.factory(rg.predTrue), // predicates
2351 strDesc // description
2353 rg.id2hrn.put(allocSite.getSummary(),hrnSummary);
2355 // make a new reference to summary node
2356 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2359 fd.getSymbol(), // field name
2360 hrnNewest.getAlpha(), // beta
2361 ExistPredSet.factory(rg.predTrue), // predicates
2365 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
// share this summary node for every later field of the same type
2369 mapTypeToExistingSummaryNode.put(type, hrnSummary);
2371 // set-up a work set for fields of the class
2372 Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
2373 for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
2375 FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator
2377 HeapRegionNode newDstHRN;
// for arrays, continue the walk from the innermost-dimension node
2378 if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)){
2379 //related heap region node already exists.
2380 newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
2382 newDstHRN=hrnSummary;
2384 doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
2385 if(!doneSet.contains(doneSetIdentifier)){
2386 // add new work item
2387 HashMap<HeapRegionNode, FieldDescriptor> newMap =
2388 new HashMap<HeapRegionNode, FieldDescriptor>();
2389 newMap.put(newDstHRN, fieldDescriptor);
2390 workSet.add(newMap);
2395 // if there exists corresponding summary node
// type seen before: just add a field edge to the shared summary node
2396 HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);
2398 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2400 fd.getType(), // type
2401 fd.getSymbol(), // field name
2402 srcHRN.getAlpha(), // beta
2403 ExistPredSet.factory(rg.predTrue), // predicates
2406 rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
2416 // return all allocation sites in the method (there is one allocation
2417 // site per FlatNew node in a method)
2418 private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
2419 if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
2420 buildAllocationSiteSet(d);
2423 return mapDescriptorToAllocSiteSet.get(d);
2427 private void buildAllocationSiteSet(Descriptor d) {
2428 HashSet<AllocSite> s = new HashSet<AllocSite>();
2431 if( d instanceof MethodDescriptor ) {
2432 fm = state.getMethodFlat( (MethodDescriptor) d);
2434 assert d instanceof TaskDescriptor;
2435 fm = state.getMethodFlat( (TaskDescriptor) d);
2437 pm.analyzeMethod(fm);
2439 // visit every node in this FlatMethod's IR graph
2440 // and make a set of the allocation sites from the
2441 // FlatNew node's visited
2442 HashSet<FlatNode> visited = new HashSet<FlatNode>();
2443 HashSet<FlatNode> toVisit = new HashSet<FlatNode>();
2446 while( !toVisit.isEmpty() ) {
2447 FlatNode n = toVisit.iterator().next();
2449 if( n instanceof FlatNew ) {
2450 s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );
2456 for( int i = 0; i < pm.numNext(n); ++i ) {
2457 FlatNode child = pm.getNext(n, i);
2458 if( !visited.contains(child) ) {
2464 mapDescriptorToAllocSiteSet.put(d, s);
2467 private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {
2469 HashSet<AllocSite> out = new HashSet<AllocSite>();
2470 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2471 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2475 while (!toVisit.isEmpty()) {
2476 Descriptor d = toVisit.iterator().next();
2480 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2481 Iterator asItr = asSet.iterator();
2482 while (asItr.hasNext()) {
2483 AllocSite as = (AllocSite) asItr.next();
2484 if (as.getDisjointAnalysisId() != null) {
2489 // enqueue callees of this method to be searched for
2490 // allocation sites also
2491 Set callees = callGraph.getCalleeSet(d);
2492 if (callees != null) {
2493 Iterator methItr = callees.iterator();
2494 while (methItr.hasNext()) {
2495 MethodDescriptor md = (MethodDescriptor) methItr.next();
2497 if (!visited.contains(md)) {
2508 private HashSet<AllocSite>
2509 getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {
2511 HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
2512 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2513 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2517 // traverse this task and all methods reachable from this task
2518 while( !toVisit.isEmpty() ) {
2519 Descriptor d = toVisit.iterator().next();
2523 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2524 Iterator asItr = asSet.iterator();
2525 while( asItr.hasNext() ) {
2526 AllocSite as = (AllocSite) asItr.next();
2527 TypeDescriptor typed = as.getType();
2528 if( typed != null ) {
2529 ClassDescriptor cd = typed.getClassDesc();
2530 if( cd != null && cd.hasFlags() ) {
2536 // enqueue callees of this method to be searched for
2537 // allocation sites also
2538 Set callees = callGraph.getCalleeSet(d);
2539 if( callees != null ) {
2540 Iterator methItr = callees.iterator();
2541 while( methItr.hasNext() ) {
2542 MethodDescriptor md = (MethodDescriptor) methItr.next();
2544 if( !visited.contains(md) ) {
2554 public Set<Descriptor> getDescriptorsToAnalyze() {
2555 return descriptorsToAnalyze;
2558 public EffectsAnalysis getEffectsAnalysis(){
2559 return effectsAnalysis;
2562 public ReachGraph getReachGraph(Descriptor d){
2563 return mapDescriptorToCompleteReachGraph.get(d);
2566 public ReachGraph getEnterReachGraph(FlatNode fn){
2567 return fn2rgAtEnter.get(fn);
// get successive captures of the analysis state, use compiler
// options to configure the capture window (see debugSnapshot below)
boolean takeDebugSnapshots = false;  // master switch for snapshot dumping
String descSymbolDebug = null;       // presumably restricts snapshots to one method symbol — TODO confirm
boolean stopAfterCapture = false;    // NOTE(review): looks like it halts analysis after the capture window — confirm
int snapVisitCounter = 0;            // visits so far; compared against visitStartCapture+numVisitsToCapture
int snapNodeCounter = 0;             // node count within the current visit (printed in snapshot log)
int visitStartCapture = 0;           // first visit index at which capture begins
int numVisitsToCapture = 0;          // length of the capture window in visits
2581 void debugSnapshot( ReachGraph rg, FlatNode fn, boolean in ) {
2582 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
2590 if( snapVisitCounter >= visitStartCapture ) {
2591 System.out.println( " @@@ snapping visit="+snapVisitCounter+
2592 ", node="+snapNodeCounter+
2596 graphName = String.format( "snap%03d_%04din",
2600 graphName = String.format( "snap%03d_%04dout",
2605 graphName = graphName + fn;
2607 rg.writeGraph( graphName,
2608 true, // write labels (variables)
2609 true, // selectively hide intermediate temp vars
2610 true, // prune unreachable heap regions
2611 false, // hide reachability
2612 false, // hide subset reachability states
2613 true, // hide predicates
2614 true ); // hide edge taints