1 package Analysis.Disjoint;
3 import Analysis.CallGraph.*;
4 import Analysis.Liveness;
5 import Analysis.ArrayReferencees;
6 import Analysis.OoOJava.Accessible;
7 import Analysis.OoOJava.RBlockRelationAnalysis;
10 import IR.Tree.Modifiers;
/**
 * Interprocedural disjoint reachability analysis.  Builds a reachability
 * graph (ReachGraph) for every method/task reachable in the call graph via
 * a fixed-point computation, then answers queries about whether objects
 * allocated at different sites may reach (share) a common heap object.
 *
 * NOTE(review): this listing is an elided/sampled view of the original
 * file; several lines are missing between visible statements.
 */
public class DisjointAnalysis implements HeapAnalysis {

  ///////////////////////////////////////////
  // Public interface to discover possible
  // sharing in the program under analysis
  ///////////////////////////////////////////
  // if an object allocated at the target site may be
  // reachable from both an object from root1 and an
  // object allocated at root2, return TRUE
  public boolean mayBothReachTarget(FlatMethod fm,
  // NOTE(review): remaining parameters (presumably FlatNew fnRoot1,
  // fnRoot2, fnTarget) are elided in this view of the file
  // both roots must be flagged allocation sites, otherwise the reach
  // graph carries no reachability tokens for them and the query is
  // meaningless (enforced by the asserts below)
  AllocSite asr1 = getAllocationSiteFromFlatNew(fnRoot1);
  AllocSite asr2 = getAllocationSiteFromFlatNew(fnRoot2);
  assert asr1.isFlagged();
  assert asr2.isFlagged();
  // the target site need not be flagged
  AllocSite ast = getAllocationSiteFromFlatNew(fnTarget);
  // query the analysis result for the enclosing method
  ReachGraph rg = getPartial(fm.getMethod() );
  return rg.mayBothReachTarget(asr1, asr2, ast);
  // similar to the method above, return TRUE if ever
  // more than one object from the root allocation site
  // may reach an object from the target site
  public boolean mayManyReachTarget(FlatMethod fm,
  // NOTE(review): remaining parameters (presumably FlatNew fnRoot,
  // fnTarget) are elided in this view of the file
  // the root must be a flagged site for the query to be meaningful
  AllocSite asr = getAllocationSiteFromFlatNew(fnRoot);
  assert asr.isFlagged();
  AllocSite ast = getAllocationSiteFromFlatNew(fnTarget);
  // query the analysis result for the enclosing method
  ReachGraph rg = getPartial(fm.getMethod() );
  return rg.mayManyReachTarget(asr, ast);
  // returns the set of flagged allocation sites transitively reachable
  // from the given task; only valid once the analysis has completed
  public HashSet<AllocSite>
  getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
    checkAnalysisComplete();
    return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
  // public, completion-checked accessor mapping a FlatNew statement to
  // the analysis abstraction of its allocation site
  public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
    checkAnalysisComplete();
    return getAllocSiteFromFlatNewPRIVATE(fn);
  // looks up the allocation site that created the heap region node
  // with the given intergraph ID; returns null if the ID is unknown
  // (plain Hashtable.get — no miss handling here)
  public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
    checkAnalysisComplete();
    return mapHrnIdToAllocSite.get(id);
  // may the two given parameters of the task/method reach a common
  // (shared) heap object?  Returns the possibly-shared heap region
  // nodes; an empty set means no sharing was detected.
  public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
  // NOTE(review): parameter list (int paramIndex1, int paramIndex2)
  // elided in this view of the file
    checkAnalysisComplete();
    ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
    FlatMethod fm=state.getMethodFlat(taskOrMethod);
    return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
  // may the given parameter and an object from the given allocation
  // site reach a common heap object?  Empty set means no sharing.
  public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
                                                 int paramIndex, AllocSite alloc) {
    checkAnalysisComplete();
    ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
    FlatMethod fm=state.getMethodFlat(taskOrMethod);
    return rg.mayReachSharedObjects(fm, paramIndex, alloc);
  // convenience overload of the method above with the arguments in the
  // opposite order; delegates to the exact same underlying query
  public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
                                                 AllocSite alloc, int paramIndex) {
    checkAnalysisComplete();
    ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
    FlatMethod fm=state.getMethodFlat(taskOrMethod);
    return rg.mayReachSharedObjects(fm, paramIndex, alloc);
  // may objects from the two given allocation sites reach a common
  // heap object in the context of the given task/method?
  public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
                                                 AllocSite alloc1, AllocSite alloc2) {
    checkAnalysisComplete();
    ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
    return rg.mayReachSharedObjects(alloc1, alloc2);
  // renders a set of heap region nodes as human-readable text for the
  // sharing reports, one node per line
  public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
    checkAnalysisComplete();
    Iterator<HeapRegionNode> i = s.iterator();
    while (i.hasNext()) {
      HeapRegionNode n = i.next();
      AllocSite as = n.getAllocSite();
      // nodes with no known allocation site print plainly...
      out += " " + n.toString() + ",\n";
      // ...otherwise include verbose allocation-site detail
      out += " " + n.toString() + ": " + as.toStringVerbose()
  // use the methods given above to check every possible sharing class
  // between task parameters and flagged allocation sites reachable
  // from each task, writing the results to the given output file
  public void writeAllSharing(String outputFile,
  // NOTE(review): additional parameters (timeReport, justTime, numLines
  // judging by their uses below) are elided in this view of the file
                              boolean tabularOutput,
  throws java.io.IOException {
    checkAnalysisComplete();

    BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));

    // human-readable header only in non-tabular mode
    if (!tabularOutput) {
      bw.write("Conducting ownership analysis with allocation depth = "
               + allocationDepth + "\n");
      bw.write(timeReport + "\n");

    // look through every task for potential sharing
    Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
    while (taskItr.hasNext()) {
      TaskDescriptor td = (TaskDescriptor) taskItr.next();

      if (!tabularOutput) {
        bw.write("\n---------" + td + "--------\n");

      HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);

      Set<HeapRegionNode> common;

      // for each task parameter, check for sharing classes with
      // other task parameters and every allocation site
      // reachable from this task
      boolean foundSomeSharing = false;

      FlatMethod fm = state.getMethodFlat(td);
      for (int i = 0; i < fm.numParameters(); ++i) {

        // skip parameters with types that cannot reference
        // (primitives etc. carry no heap reachability)
        if( !shouldAnalysisTrack(fm.getParameter(i).getType() ) ) {

        // for the ith parameter check for sharing classes to all
        // higher numbered parameters (avoids symmetric duplicates)
        for (int j = i + 1; j < fm.numParameters(); ++j) {

          // skip parameters with types that cannot reference
          if( !shouldAnalysisTrack(fm.getParameter(j).getType() ) ) {

          common = hasPotentialSharing(td, i, j);
          if (!common.isEmpty()) {
            foundSomeSharing = true;

            if (!tabularOutput) {
              bw.write("Potential sharing between parameters " + i
                       + " and " + j + ".\n");
              bw.write(prettyPrintNodeSet(common) + "\n");

        // for the ith parameter, check for sharing classes against
        // the set of allocation sites reachable from this task
        Iterator allocItr = allocSites.iterator();
        while (allocItr.hasNext()) {
          AllocSite as = (AllocSite) allocItr.next();
          common = hasPotentialSharing(td, i, as);
          if (!common.isEmpty()) {
            foundSomeSharing = true;

            if (!tabularOutput) {
              bw.write("Potential sharing between parameter " + i
                       + " and " + as.getFlatNew() + ".\n");
              bw.write(prettyPrintNodeSet(common) + "\n");

      // for each allocation site check for sharing classes with
      // other allocation sites in the context of execution;
      // outerChecked prevents reporting each unordered pair twice
      HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
      Iterator allocItr1 = allocSites.iterator();
      while (allocItr1.hasNext()) {
        AllocSite as1 = (AllocSite) allocItr1.next();

        Iterator allocItr2 = allocSites.iterator();
        while (allocItr2.hasNext()) {
          AllocSite as2 = (AllocSite) allocItr2.next();

          if (!outerChecked.contains(as2)) {
            common = hasPotentialSharing(td, as1, as2);

            if (!common.isEmpty()) {
              foundSomeSharing = true;

              if (!tabularOutput) {
                bw.write("Potential sharing between "
                         + as1.getFlatNew() + " and "
                         + as2.getFlatNew() + ".\n");
                bw.write(prettyPrintNodeSet(common) + "\n");

        outerChecked.add(as1);

      if (!foundSomeSharing) {
        if (!tabularOutput) {
          bw.write("No sharing between flagged objects in Task " + td

    // tabular mode emits a LaTeX-style table row instead
    bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
             + " & " + numMethodsAnalyzed() + " \\\\\n");

    bw.write("\nNumber sharing classes: "+numSharing);
  // this version of writeAllSharing is for Java programs that have no tasks
  // ***********************************
  // WARNING: THIS DOES NOT DO THE RIGHT THING, REPORTS 0 ALWAYS!
  // It should use mayBothReachTarget and mayManyReachTarget like
  // OoOJava does to query analysis results
  // ***********************************
  public void writeAllSharingJava(String outputFile,
  // NOTE(review): additional parameters (timeReport at least, judging
  // by the use below) are elided in this view of the file
                                  boolean tabularOutput,
  throws java.io.IOException {
    checkAnalysisComplete();

    BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));

    bw.write("Conducting disjoint reachability analysis with allocation depth = "
             + allocationDepth + "\n");
    bw.write(timeReport + "\n\n");

    boolean foundSomeSharing = false;

    // in task-free (Java) mode, the root context is the main method
    Descriptor d = typeUtil.getMain();
    HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);

    // for each allocation site check for sharing classes with
    // other allocation sites in the context of execution;
    // outerChecked prevents reporting each unordered pair twice
    HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
    Iterator allocItr1 = allocSites.iterator();
    while (allocItr1.hasNext()) {
      AllocSite as1 = (AllocSite) allocItr1.next();

      Iterator allocItr2 = allocSites.iterator();
      while (allocItr2.hasNext()) {
        AllocSite as2 = (AllocSite) allocItr2.next();

        if (!outerChecked.contains(as2)) {
          Set<HeapRegionNode> common = hasPotentialSharing(d,

          if (!common.isEmpty()) {
            foundSomeSharing = true;
            bw.write("Potential sharing between "
                     + as1.getDisjointAnalysisId() + " and "
                     + as2.getDisjointAnalysisId() + ".\n");
            bw.write(prettyPrintNodeSet(common) + "\n");

      outerChecked.add(as1);

    if (!foundSomeSharing) {
      bw.write("No sharing classes between flagged objects found.\n");

    bw.write("\nNumber sharing classes: "+numSharing);

    bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
  ///////////////////////////////////////////
  // end public interface
  ///////////////////////////////////////////

  // guard used by every public accessor: analysis results are only
  // safe to read once the interprocedural fixed point has been reached
  protected void checkAnalysisComplete() {
    if( !analysisComplete ) {
      throw new Error("Warning: public interface method called while analysis is running.");
  // run in faster mode, only when bugs wrung out!
  public static boolean releaseMode;

  // use command line option to set this, analysis
  // should attempt to be deterministic
  public static boolean determinismDesired;

  // when we want to enforce determinism in the
  // analysis we need to sort descriptors rather
  // than toss them in efficient sets, use this
  public static DescriptorComparator dComp =
    new DescriptorComparator();

  // data from the compiler
  public CallGraph callGraph;
  public Liveness liveness;
  public ArrayReferencees arrayReferencees;
  public RBlockRelationAnalysis rblockRel;
  public TypeUtil typeUtil;
  public int allocationDepth;

  // effects analysis is only enabled when an RBlock relation is
  // supplied (see init)
  protected boolean doEffectsAnalysis = false;
  protected EffectsAnalysis effectsAnalysis;
  protected BuildStateMachines buildStateMachines;

  // data structure for public interface
  private Hashtable< Descriptor, HashSet<AllocSite> >
  mapDescriptorToAllocSiteSet;

  // for public interface methods to warn that they
  // are grabbing results during analysis
  private boolean analysisComplete;

  // used to identify HeapRegionNode objects
  // A unique ID equates an object in one
  // ownership graph with an object in another
  // graph that logically represents the same
  // start at 10 and increment to reserve some
  // IDs for special purposes
  static protected int uniqueIDcount = 10;

  // An out-of-scope method created by the
  // analysis that has no parameters, and
  // appears to allocate the command line
  // arguments, then invoke the source code's
  // main method. The purpose of this is to
  // provide the analysis with an explicit
  // top-level context with no parameters
  protected MethodDescriptor mdAnalysisEntry;
  protected FlatMethod fmAnalysisEntry;

  // main method defined by source program
  protected MethodDescriptor mdSourceEntry;

  // the set of task and/or method descriptors
  // reachable in call graph
  protected Set<Descriptor>
  descriptorsToAnalyze;

  // current descriptors to visit in fixed-point
  // interprocedural analysis, prioritized by
  // dependency in the call graph
  protected Stack<Descriptor>
  descriptorsToVisitStack;
  // NOTE(review): the field name for this queue (descriptorsToVisitQ,
  // per its uses elsewhere) is elided in this view of the file
  protected PriorityQueue<DescriptorQWrapper>

  // a duplication of the above structure, but
  // for efficient testing of inclusion
  protected HashSet<Descriptor>
  descriptorsToVisitSet;

  // storage for priorities (it doesn't make sense
  // to add them to the Descriptor class), just in
  protected Hashtable<Descriptor, Integer>
  mapDescriptorToPriority;

  // when analyzing a method and scheduling more:
  // remember the set of callees enqueued for analysis
  // so they can be put on top of the callers in
  // the stack-visit mode
  // NOTE(review): the field name (calleesToEnqueue, per its uses
  // elsewhere) is elided in this view of the file
  protected Set<Descriptor>

  // maps a descriptor to its current partial result
  // from the intraprocedural fixed-point analysis--
  // then the interprocedural analysis settles, this
  // mapping will have the final results for each
  protected Hashtable<Descriptor, ReachGraph>
  mapDescriptorToCompleteReachGraph;

  // maps a descriptor to its known dependents: namely
  // methods or tasks that call the descriptor's method
  // AND are part of this analysis (reachable from main)
  protected Hashtable< Descriptor, Set<Descriptor> >
  mapDescriptorToSetDependents;

  // if the analysis client wants to flag allocation sites
  // programmatically, it should provide a set of FlatNew
  // statements--this may be null if unneeded
  protected Set<FlatNew> sitesToFlag;

  // maps each flat new to one analysis abstraction
  // allocation site object, these exist outside reach graphs
  protected Hashtable<FlatNew, AllocSite>
  mapFlatNewToAllocSite;

  // maps intergraph heap region IDs to intergraph
  // allocation sites that created them, a redundant
  // structure for efficiency in some operations
  // NOTE(review): the field name (mapHrnIdToAllocSite, per its uses
  // elsewhere) is elided in this view of the file
  protected Hashtable<Integer, AllocSite>

  // maps a method to its initial heap model (IHM) that
  // is the set of reachability graphs from every caller
  // site, all merged together. The reason that we keep
  // them separate is that any one call site's contribution
  // to the IHM may change along the path to the fixed point
  protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
  mapDescriptorToIHMcontributions;

  // additionally, keep a mapping from descriptors to the
  // merged in-coming initial context, because we want this
  // initial context to be STRICTLY MONOTONIC
  protected Hashtable<Descriptor, ReachGraph>
  mapDescriptorToInitialContext;

  // make the result for back edges analysis-wide STRICTLY
  // MONOTONIC as well, but notice we use FlatNode as the
  // key for this map: in case we want to consider other
  // nodes as back edges in future implementations
  protected Hashtable<FlatNode, ReachGraph>
  mapBackEdgeToMonotone;

  // synthetic field name used to model array element references
  public static final String arrayElementFieldName = "___element_";
  // NOTE(review): the field name (mapTypeToArrayField, per its
  // initialization in allocateStructures) is elided in this view
  static protected Hashtable<TypeDescriptor, FieldDescriptor>

  protected boolean suppressOutput;

  // for controlling DOT file output
  protected boolean writeFinalDOTs;
  protected boolean writeAllIncrementalDOTs;

  // supporting DOT output--when we want to write every
  // partial method result, keep a tally for generating
  protected Hashtable<Descriptor, Integer>
  mapDescriptorToNumUpdates;

  //map task descriptor to initial task parameter
  protected Hashtable<Descriptor, ReachGraph>
  mapDescriptorToReachGraph;

  protected PointerMethod pm;

  //Keeps track of all the reach graphs at every program point
  //DO NOT USE UNLESS YOU REALLY NEED IT
  static protected Hashtable<FlatNode, ReachGraph> fn2rgAtEnter =
    new Hashtable<FlatNode, ReachGraph>();

  // lazily-populated cache of the descriptor enclosing each call site
  private Hashtable<FlatCall, Descriptor> fc2enclosing;

  Accessible accessible;
  // allocate various structures that are not local
  // to a single class method--should be done once
  protected void allocateStructures() {

    if( determinismDesired ) {
      // use an ordered set
      descriptorsToAnalyze = new TreeSet<Descriptor>(dComp);
      // otherwise use a speedy hashset
      descriptorsToAnalyze = new HashSet<Descriptor>();

    mapDescriptorToCompleteReachGraph =
      new Hashtable<Descriptor, ReachGraph>();

    mapDescriptorToNumUpdates =
      new Hashtable<Descriptor, Integer>();

    mapDescriptorToSetDependents =
      new Hashtable< Descriptor, Set<Descriptor> >();

    mapFlatNewToAllocSite =
      new Hashtable<FlatNew, AllocSite>();

    mapDescriptorToIHMcontributions =
      new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();

    mapDescriptorToInitialContext =
      new Hashtable<Descriptor, ReachGraph>();

    mapBackEdgeToMonotone =
      new Hashtable<FlatNode, ReachGraph>();

    mapHrnIdToAllocSite =
      new Hashtable<Integer, AllocSite>();

    mapTypeToArrayField =
      new Hashtable <TypeDescriptor, FieldDescriptor>();

    // scheduling structures depend on the configured visit mode
    if( state.DISJOINTDVISITSTACK ||
        state.DISJOINTDVISITSTACKEESONTOP
      descriptorsToVisitStack =
        new Stack<Descriptor>();

    if( state.DISJOINTDVISITPQUE ) {
      descriptorsToVisitQ =
        new PriorityQueue<DescriptorQWrapper>();

    descriptorsToVisitSet =
      new HashSet<Descriptor>();

    mapDescriptorToPriority =
      new Hashtable<Descriptor, Integer>();

    // NOTE(review): the assignment target on the next line (likely
    // calleesToEnqueue =) is elided in this view of the file
      new HashSet<Descriptor>();

    mapDescriptorToAllocSiteSet =
      new Hashtable<Descriptor, HashSet<AllocSite> >();

    mapDescriptorToReachGraph =
      new Hashtable<Descriptor, ReachGraph>();

    pm = new PointerMethod();

    fc2enclosing = new Hashtable<FlatCall, Descriptor>();
  // this analysis generates a disjoint reachability
  // graph for every reachable method in the program
  //
  // convenience constructor: no BuildStateMachines, output enabled
  // NOTE(review): middle parameters (TypeUtil tu, CallGraph cg,
  // Liveness l, ArrayReferencees ar, per the init call) are elided
  // in this view of the file
  public DisjointAnalysis(State s,
                          Set<FlatNew> sitesToFlag,
                          RBlockRelationAnalysis rra
    init(s, tu, cg, l, ar, sitesToFlag, rra, null, false);
  // convenience constructor: no BuildStateMachines, caller chooses
  // whether console output is suppressed
  // NOTE(review): middle parameters (tu, cg, l, ar) elided in this view
  public DisjointAnalysis(State s,
                          Set<FlatNew> sitesToFlag,
                          RBlockRelationAnalysis rra,
                          boolean suppressOutput
    init(s, tu, cg, l, ar, sitesToFlag, rra, null, suppressOutput);
  // full constructor: supplies a BuildStateMachines for the
  // OoOJava/effects client as well
  // NOTE(review): middle parameters (tu, cg, l, ar) elided in this view
  public DisjointAnalysis(State s,
                          Set<FlatNew> sitesToFlag,
                          RBlockRelationAnalysis rra,
                          BuildStateMachines bsm,
                          boolean suppressOutput
    init(s, tu, cg, l, ar, sitesToFlag, rra, bsm, suppressOutput);
  // shared initializer for all constructors: stores compiler inputs,
  // configures static analysis options from State, runs the whole
  // interprocedural fixed point, and writes any requested reports
  protected void init(State state,
  // NOTE(review): parameters typeUtil, callGraph, liveness are elided
  // in this view of the file
                      ArrayReferencees arrayReferencees,
                      Set<FlatNew> sitesToFlag,
                      RBlockRelationAnalysis rra,
                      BuildStateMachines bsm,
                      boolean suppressOutput

    analysisComplete = false;

    this.typeUtil = typeUtil;
    this.callGraph = callGraph;
    this.liveness = liveness;
    this.arrayReferencees = arrayReferencees;
    this.sitesToFlag = sitesToFlag;
    this.rblockRel = rra;
    this.suppressOutput = suppressOutput;
    this.buildStateMachines = bsm;

    // effects analysis only runs when an RBlock relation is provided
    if( rblockRel != null ) {
      doEffectsAnalysis = true;
      effectsAnalysis = new EffectsAnalysis();

      EffectsAnalysis.state = state;
      EffectsAnalysis.buildStateMachines = buildStateMachines;

      //note: instead of reachgraph's isAccessible, using the result of accessible analysis
      //since accessible gives us more accurate results
      accessible=new Accessible(state, callGraph, rra, liveness);
      accessible.doAnalysis();

    this.allocationDepth = state.DISJOINTALLOCDEPTH;
    this.releaseMode = state.DISJOINTRELEASEMODE;
    this.determinismDesired = state.DISJOINTDETERMINISM;

    this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL;
    this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL;

    // debug snapshot configuration
    this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
    this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
    this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
    this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
    this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
    this.snapVisitCounter = 1; // count visits from 1 (user will write 1, means 1st visit)
    this.snapNodeCounter = 0; // count nodes from 0

    // exactly one descriptor-visiting mode must be selected
    // NOTE(review): the start of this assertion (assert ...) is elided
    // in this view of the file
      state.DISJOINTDVISITSTACK ||
      state.DISJOINTDVISITPQUE ||
      state.DISJOINTDVISITSTACKEESONTOP;
    assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
    assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
    assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);

    // set some static configuration for ReachGraphs
    ReachGraph.allocationDepth = allocationDepth;
    ReachGraph.typeUtil = typeUtil;
    ReachGraph.state = state;

    ReachGraph.debugCallSiteVisitStartCapture
      = state.DISJOINTDEBUGCALLVISITTOSTART;

    ReachGraph.debugCallSiteNumVisitsToCapture
      = state.DISJOINTDEBUGCALLNUMVISITS;

    ReachGraph.debugCallSiteStopAfter
      = state.DISJOINTDEBUGCALLSTOPAFTER;

    ReachGraph.debugCallSiteVisitCounter
      = 0; // count visits from 1, is incremented before first visit

    // banner warns the user that console output is intentionally off
    if( suppressOutput ) {
      System.out.println("* Running disjoint reachability analysis with output suppressed! *");

    allocateStructures();

    double timeStartAnalysis = (double) System.nanoTime();

    // start interprocedural fixed-point computation
    } catch( IOException e ) {
      throw new Error("IO Exception while writing disjointness analysis output.");

    analysisComplete=true;

    double timeEndAnalysis = (double) System.nanoTime();
    double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow(10.0, 9.0) );

    if( sitesToFlag != null ) {
      treport = String.format("Disjoint reachability analysis flagged %d sites and took %.3f sec.", sitesToFlag.size(), dt);
      if(sitesToFlag.size()>0) {
        treport+="\nFlagged sites:"+"\n"+sitesToFlag.toString();
      treport = String.format("Disjoint reachability analysis took %.3f sec.", dt);
    String justtime = String.format("%.2f", dt);
    System.out.println(treport);

    // optional report/DOT output, gated by compiler flags
    if( writeFinalDOTs && !writeAllIncrementalDOTs ) {

    if( state.DISJOINTWRITEIHMS && !suppressOutput ) {

    if( state.DISJOINTWRITEINITCONTEXTS && !suppressOutput ) {
      writeInitialContexts();

    if( state.DISJOINTALIASFILE != null && !suppressOutput ) {
      writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
      writeAllSharingJava(state.DISJOINTALIASFILE,
                          state.DISJOINTALIASTAB,

      buildStateMachines.writeStateMachines();

    } catch( IOException e ) {
      throw new Error("IO Exception while writing disjointness analysis output.");
  // true iff the scheduling structure for the configured visiting
  // mode still holds descriptors awaiting (re)analysis
  protected boolean moreDescriptorsToVisit() {
    if( state.DISJOINTDVISITSTACK ||
        state.DISJOINTDVISITSTACKEESONTOP
      return !descriptorsToVisitStack.isEmpty();

    } else if( state.DISJOINTDVISITPQUE ) {
      return !descriptorsToVisitQ.isEmpty();

    throw new Error("Neither descriptor visiting mode set");
  // fixed-point computation over the call graph--when a
  // method's callees are updated, it must be reanalyzed
  protected void analyzeMethods() throws java.io.IOException {

    // task or non-task (java) mode determines what the roots
    // of the call chain are, and establishes the set of methods
    // reachable from the roots that will be analyzed

    if( !suppressOutput ) {
      System.out.println("Bamboo mode...");

    // task (Bamboo) mode: every task is a root
    Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
    while( taskItr.hasNext() ) {
      TaskDescriptor td = (TaskDescriptor) taskItr.next();
      if( !descriptorsToAnalyze.contains(td) ) {
        // add all methods transitively reachable from the
        descriptorsToAnalyze.add(td);
        descriptorsToAnalyze.addAll(callGraph.getAllMethods(td) );

    if( !suppressOutput ) {
      System.out.println("Java mode...");

    // add all methods transitively reachable from the
    // source's main to set for analysis
    mdSourceEntry = typeUtil.getMain();
    descriptorsToAnalyze.add(mdSourceEntry);
    descriptorsToAnalyze.addAll(callGraph.getAllMethods(mdSourceEntry) );

    // fabricate an empty calling context that will call
    // the source's main, but call graph doesn't know
    // about it, so explicitly add it
    makeAnalysisEntryMethod(mdSourceEntry);
    descriptorsToAnalyze.add(mdAnalysisEntry);

    // now, depending on the interprocedural mode for visiting
    // methods, set up the needed data structures

    if( state.DISJOINTDVISITPQUE ) {

      // topologically sort according to the call graph so
      // leaf calls are last, helps build contexts up first
      LinkedList<Descriptor> sortedDescriptors =
        topologicalSort(descriptorsToAnalyze);

      // add sorted descriptors to priority queue, and duplicate
      // the queue as a set for efficiently testing whether some
      // method is marked for analysis
      Iterator<Descriptor> dItr;

      // for the priority queue, give items at the head
      // of the sorted list a low number (highest priority)
      while( !sortedDescriptors.isEmpty() ) {
        Descriptor d = sortedDescriptors.removeFirst();
        mapDescriptorToPriority.put(d, new Integer(p) );
        descriptorsToVisitQ.add(new DescriptorQWrapper(p, d) );
        descriptorsToVisitSet.add(d);

    } else if( state.DISJOINTDVISITSTACK ||
               state.DISJOINTDVISITSTACKEESONTOP

      // if we're doing the stack scheme, just throw the root
      // method or tasks on the stack
      Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
      while( taskItr.hasNext() ) {
        TaskDescriptor td = (TaskDescriptor) taskItr.next();
        descriptorsToVisitStack.add(td);
        descriptorsToVisitSet.add(td);

      // java mode: the fabricated entry method is the only root
      descriptorsToVisitStack.add(mdAnalysisEntry);
      descriptorsToVisitSet.add(mdAnalysisEntry);

      throw new Error("Unknown method scheduling mode");

    // analyze scheduled methods until there are no more to visit
    while( moreDescriptorsToVisit() ) {

      if( state.DISJOINTDVISITSTACK ||
          state.DISJOINTDVISITSTACKEESONTOP
        d = descriptorsToVisitStack.pop();

      } else if( state.DISJOINTDVISITPQUE ) {
        d = descriptorsToVisitQ.poll().getDescriptor();

      // keep the set mirror of the schedule consistent
      assert descriptorsToVisitSet.contains(d);
      descriptorsToVisitSet.remove(d);

      // because the task or method descriptor just extracted
      // was in the "to visit" set it either hasn't been analyzed
      // yet, or some method that it depends on has been
      // updated. Recompute a complete reachability graph for
      // this task/method and compare it to any previous result.
      // If there is a change detected, add any methods/tasks
      // that depend on this one to the "to visit" set.

      if( !suppressOutput ) {
        System.out.println("Analyzing " + d);

      if( state.DISJOINTDVISITSTACKEESONTOP ) {
        assert calleesToEnqueue.isEmpty();

      ReachGraph rg = analyzeMethod(d);
      ReachGraph rgPrev = getPartial(d);

      if( !rg.equals(rgPrev) ) {

        if( state.DISJOINTDEBUGSCHEDULING ) {
          System.out.println("  complete graph changed, scheduling callers for analysis:");

        // results for d changed, so enqueue dependents
        // of d for further analysis
        Iterator<Descriptor> depsItr = getDependents(d).iterator();
        while( depsItr.hasNext() ) {
          Descriptor dNext = depsItr.next();

          if( state.DISJOINTDEBUGSCHEDULING ) {
            System.out.println("    "+dNext);

      // whether or not the method under analysis changed,
      // we may have some callees that are scheduled for
      // more analysis, and they should go on the top of
      // the stack now (in other method-visiting modes they
      // are already enqueued at this point
      if( state.DISJOINTDVISITSTACKEESONTOP ) {
        Iterator<Descriptor> depsItr = calleesToEnqueue.iterator();
        while( depsItr.hasNext() ) {
          Descriptor dNext = depsItr.next();

        calleesToEnqueue.clear();
  // intraprocedural fixed point over one method/task's flat nodes;
  // returns the merge of all return-node graphs as the method's
  // conservative final heap state
  protected ReachGraph analyzeMethod(Descriptor d)
  throws java.io.IOException {

    // get the flat code for this descriptor; the fabricated analysis
    // entry has no State-registered flat method of its own
    if( d == mdAnalysisEntry ) {
      fm = fmAnalysisEntry;
      fm = state.getMethodFlat(d);
    pm.analyzeMethod(fm);

    // intraprocedural work set
    Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
    flatNodesToVisit.add(fm);

    // if determinism is desired by client, shadow the
    // set with a queue to make visit order deterministic
    Queue<FlatNode> flatNodesToVisitQ = null;
    if( determinismDesired ) {
      flatNodesToVisitQ = new LinkedList<FlatNode>();
      flatNodesToVisitQ.add(fm);

    // mapping of current partial results
    Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph =
      new Hashtable<FlatNode, ReachGraph>();

    // the set of return nodes partial results that will be combined as
    // the final, conservative approximation of the entire method
    HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();

    // decide whether debug snapshots apply to this particular method
    boolean snapThisMethod = false;
    if( takeDebugSnapshots && d instanceof MethodDescriptor ) {
      MethodDescriptor mdThisMethod = (MethodDescriptor)d;
      ClassDescriptor cdThisMethod = mdThisMethod.getClassDesc();
      if( cdThisMethod != null ) {
        descSymbolDebug.equals( cdThisMethod.getSymbol()+
                                mdThisMethod.getSymbol()

    while( !flatNodesToVisit.isEmpty() ) {

      // deterministic mode pulls from the shadow queue instead
      if( determinismDesired ) {
        assert !flatNodesToVisitQ.isEmpty();
        fn = flatNodesToVisitQ.remove();
        fn = flatNodesToVisit.iterator().next();
      flatNodesToVisit.remove(fn);

      // effect transfer function defined by this node,
      // then compare it to the old graph at this node
      // to see if anything was updated.

      ReachGraph rg = new ReachGraph();
      TaskDescriptor taskDesc;
      if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null) {
        if(mapDescriptorToReachGraph.containsKey(taskDesc)) {
          // retrieve existing reach graph if it is not first time
          rg=mapDescriptorToReachGraph.get(taskDesc);
          // create initial reach graph for a task
          rg=createInitialTaskReachGraph((FlatMethod)fn);
        mapDescriptorToReachGraph.put(taskDesc, rg);

      // start by merging all node's parents' graphs
      for( int i = 0; i < pm.numPrev(fn); ++i ) {
        FlatNode pn = pm.getPrev(fn,i);
        if( mapFlatNodeToReachGraph.containsKey(pn) ) {
          ReachGraph rgParent = mapFlatNodeToReachGraph.get(pn);

      if( snapThisMethod ) {
        debugSnapshot(rg, fn, true);

      // modify rg with appropriate transfer function
      rg = analyzeFlatNode(d, fm, fn, setReturns, rg);

      if( snapThisMethod ) {
        debugSnapshot(rg, fn, false);

      // if the results of the new graph are different from
      // the current graph at this node, replace the graph
      // with the update and enqueue the children
      ReachGraph rgPrev = mapFlatNodeToReachGraph.get(fn);
      if( !rg.equals(rgPrev) ) {
        mapFlatNodeToReachGraph.put(fn, rg);

        for( int i = 0; i < pm.numNext(fn); i++ ) {
          FlatNode nn = pm.getNext(fn, i);

          flatNodesToVisit.add(nn);
          if( determinismDesired ) {
            flatNodesToVisitQ.add(nn);

    // end by merging all return nodes into a complete
    // reach graph that represents all possible heap
    // states after the flat method returns
    ReachGraph completeGraph = new ReachGraph();

    assert !setReturns.isEmpty();
    Iterator retItr = setReturns.iterator();
    while( retItr.hasNext() ) {
      FlatReturnNode frn = (FlatReturnNode) retItr.next();

      assert mapFlatNodeToReachGraph.containsKey(frn);
      ReachGraph rgRet = mapFlatNodeToReachGraph.get(frn);

      completeGraph.merge(rgRet);

    if( snapThisMethod ) {
      // increment that we've visited the debug snap
      // method, and reset the node counter
      System.out.println("    @@@ debug snap at visit "+snapVisitCounter);
      snapNodeCounter = 0;

      if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&
        System.out.println("!!! Stopping analysis after debug snap captures. !!!");

    return completeGraph;
/**
 * The analysis transfer function: given the reach graph 'rg' flowing into
 * flat node 'fn', apply the abstract semantics of that node's kind (copy,
 * cast, field/element read and write, allocation, SESE enter/exit, method
 * call, return) and leave rg as the graph that flows out.  Return nodes
 * encountered are collected into setRetNodes so the caller can later merge
 * a method-exit graph.
 *
 * NOTE(review): extraction appears to have dropped lines from this listing
 * (e.g. the FlatNode fn / ReachGraph rg parameters used below, several
 * closing braces and break statements, and the trailing return) — restore
 * from version control before compiling.  Surviving tokens are kept
 * verbatim below.
 */
protected ReachGraph
analyzeFlatNode(Descriptor d,
                FlatMethod fmContaining,
                HashSet<FlatReturnNode> setRetNodes,
                ) throws java.io.IOException {

  // any variables that are no longer live should be
  // nullified in the graph to reduce edges
  //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );

  FieldDescriptor fld;
  TypeDescriptor tdElement;
  FieldDescriptor fdElement;
  FlatSESEEnterNode sese;
  FlatSESEExitNode fsexn;

  //Stores the flatnode's reach graph at enter
  // (snapshot taken before this node's transfer function runs)
  ReachGraph rgOnEnter = new ReachGraph();
  rgOnEnter.merge(rg);
  fn2rgAtEnter.put(fn, rgOnEnter);

  // use node type to decide what transfer function
  // to apply to the reachability graph
  switch( fn.kind() ) {

  case FKind.FlatGenReachNode: {
    // debugging aid: dump the current graph at this program point
    FlatGenReachNode fgrn = (FlatGenReachNode) fn;

    System.out.println(" Generating reach graph for program point: "+fgrn.getGraphName() );

    rg.writeGraph("genReach"+fgrn.getGraphName(),
                  true, // write labels (variables)
                  true, // selectively hide intermediate temp vars
                  true, // prune unreachable heap regions
                  true, // hide reachability altogether
                  true, // hide subset reachability states
                  true, // hide predicates
                  false); // hide edge taints

  case FKind.FlatMethod: {
    // construct this method's initial heap model (IHM)
    // since we're working on the FlatMethod, we know
    // the incoming ReachGraph 'rg' is empty

    Hashtable<FlatCall, ReachGraph> heapsFromCallers =
      getIHMcontributions(d);

    Set entrySet = heapsFromCallers.entrySet();
    Iterator itr = entrySet.iterator();
    while( itr.hasNext() ) {
      Map.Entry me = (Map.Entry)itr.next();
      FlatCall fc = (FlatCall) me.getKey();
      ReachGraph rgContrib = (ReachGraph) me.getValue();

      // every recorded contribution must target this method
      assert fc.getMethod().equals(d);

      rg.merge(rgContrib);

    // additionally, we are enforcing STRICT MONOTONICITY for the
    // method's initial context, so grow the context by whatever
    // the previously computed context was, and put the most
    // up-to-date context back in the map
    ReachGraph rgPrevContext = mapDescriptorToInitialContext.get(d);
    rg.merge(rgPrevContext);
    mapDescriptorToInitialContext.put(d, rg);

  case FKind.FlatOpNode:
    FlatOpNode fon = (FlatOpNode) fn;
    // only simple copies (x = y) affect the heap abstraction
    if( fon.getOp().getOp() == Operation.ASSIGN ) {
      lhs = fon.getDest();
      rhs = fon.getLeft();

      // before transfer, do effects analysis support
      if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
        if(rblockRel.isPotentialStallSite(fn)) {
          // x gets status of y
          // if(!rg.isAccessible(rhs)){
          if(!accessible.isAccessible(fn, rhs)) {
            rg.makeInaccessible(lhs);

      rg.assignTempXEqualToTempY(lhs, rhs);

  case FKind.FlatCastNode:
    FlatCastNode fcn = (FlatCastNode) fn;

    TypeDescriptor td = fcn.getType();

    // before transfer, do effects analysis support
    if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
      if(rblockRel.isPotentialStallSite(fn)) {
        // x gets status of y
        // if(!rg.isAccessible(rhs)){
        if(!accessible.isAccessible(fn,rhs)) {
          rg.makeInaccessible(lhs);

    rg.assignTempXEqualToCastedTempY(lhs, rhs, td);

  case FKind.FlatFieldNode:
    FlatFieldNode ffn = (FlatFieldNode) fn;

    fld = ffn.getField();

    // before graph transform, possible inject
    // a stall-site taint
    if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {

      if(rblockRel.isPotentialStallSite(fn)) {
        // x=y.f, stall y if not accessible
        // contributes read effects on stall site of y
        // if(!rg.isAccessible(rhs)) {
        if(!accessible.isAccessible(fn,rhs)) {
          rg.taintStallSite(fn, rhs);

        // after this, x and y are accessbile.
        rg.makeAccessible(lhs);
        rg.makeAccessible(rhs);

    if( shouldAnalysisTrack(fld.getType() ) ) {
      rg.assignTempXEqualToTempYFieldF(lhs, rhs, fld, fn);

    // after transfer, use updated graph to
    // do effects analysis
    if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
      effectsAnalysis.analyzeFlatFieldNode(rg, rhs, fld, fn);

  case FKind.FlatSetFieldNode:
    FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;

    lhs = fsfn.getDst();
    fld = fsfn.getField();
    rhs = fsfn.getSrc();

    // remember whether this write was a strong update so the
    // effects analysis below can be told
    boolean strongUpdate = false;

    // before transfer func, possibly inject
    // stall-site taints
    if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {

      if(rblockRel.isPotentialStallSite(fn)) {
        // x.y=f , stall x and y if they are not accessible
        // also contribute write effects on stall site of x
        // if(!rg.isAccessible(lhs)) {
        if(!accessible.isAccessible(fn,lhs)) {
          rg.taintStallSite(fn, lhs);

        // if(!rg.isAccessible(rhs)) {
        if(!accessible.isAccessible(fn,rhs)) {
          rg.taintStallSite(fn, rhs);

        // accessible status update
        rg.makeAccessible(lhs);
        rg.makeAccessible(rhs);

    if( shouldAnalysisTrack(fld.getType() ) ) {
      strongUpdate = rg.assignTempXFieldFEqualToTempY(lhs, fld, rhs, fn);

    // use transformed graph to do effects analysis
    if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
      effectsAnalysis.analyzeFlatSetFieldNode(rg, lhs, fld, fn, strongUpdate);

  case FKind.FlatElementNode:
    FlatElementNode fen = (FlatElementNode) fn;

    // array reads are modeled as reads of the fabricated
    // per-element-type array field (see getArrayField)
    assert rhs.getType() != null;
    assert rhs.getType().isArray();

    tdElement = rhs.getType().dereference();
    fdElement = getArrayField(tdElement);

    // before transfer func, possibly inject
    if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
      if(rblockRel.isPotentialStallSite(fn)) {
        // x=y.f, stall y if not accessible
        // contributes read effects on stall site of y
        // after this, x and y are accessbile.
        // if(!rg.isAccessible(rhs)) {
        if(!accessible.isAccessible(fn,rhs)) {
          rg.taintStallSite(fn, rhs);

        rg.makeAccessible(lhs);
        rg.makeAccessible(rhs);

    if( shouldAnalysisTrack(lhs.getType() ) ) {
      rg.assignTempXEqualToTempYFieldF(lhs, rhs, fdElement, fn);

    // use transformed graph to do effects analysis
    if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
      effectsAnalysis.analyzeFlatFieldNode(rg, rhs, fdElement, fn);

  case FKind.FlatSetElementNode:
    FlatSetElementNode fsen = (FlatSetElementNode) fn;

    lhs = fsen.getDst();
    rhs = fsen.getSrc();

    assert lhs.getType() != null;
    assert lhs.getType().isArray();

    tdElement = lhs.getType().dereference();
    fdElement = getArrayField(tdElement);

    // before transfer func, possibly inject
    // stall-site taints
    if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {

      if(rblockRel.isPotentialStallSite(fn)) {
        // x.y=f , stall x and y if they are not accessible
        // also contribute write effects on stall site of x
        // if(!rg.isAccessible(lhs)) {
        if(!accessible.isAccessible(fn,lhs)) {
          rg.taintStallSite(fn, lhs);

        // if(!rg.isAccessible(rhs)) {
        if(!accessible.isAccessible(fn,rhs)) {
          rg.taintStallSite(fn, rhs);

        // accessible status update
        rg.makeAccessible(lhs);
        rg.makeAccessible(rhs);

    if( shouldAnalysisTrack(rhs.getType() ) ) {
      // transfer func, BUT
      // skip this node if it cannot create new reachability paths
      if( !arrayReferencees.doesNotCreateNewReaching(fsen) ) {
        rg.assignTempXFieldFEqualToTempY(lhs, fdElement, rhs, fn);

    // use transformed graph to do effects analysis
    if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
      effectsAnalysis.analyzeFlatSetFieldNode(rg, lhs, fdElement, fn,

    // allocation: bind lhs to a fresh region from this site's model
    FlatNew fnn = (FlatNew) fn;
    if( shouldAnalysisTrack(lhs.getType() ) ) {
      AllocSite as = getAllocSiteFromFlatNewPRIVATE(fnn);

      // before transform, support effects analysis
      if (doEffectsAnalysis && fmContaining != fmAnalysisEntry) {
        if (rblockRel.isPotentialStallSite(fn)) {
          // after creating new object, lhs is accessible
          rg.makeAccessible(lhs);

      rg.assignTempEqualToNewAlloc(lhs, as);

  case FKind.FlatSESEEnterNode:
    sese = (FlatSESEEnterNode) fn;

    if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {

      // always remove ALL stall site taints at enter
      rg.removeAllStallSiteTaints();

      // inject taints for in-set vars
      rg.taintInSetVars(sese);

  case FKind.FlatSESEExitNode:
    fsexn = (FlatSESEExitNode) fn;
    sese = fsexn.getFlatEnter();

    if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {

      // @ sese exit make all live variables
      // inaccessible to later parent statements
      rg.makeInaccessible(liveness.getLiveInTemps(fmContaining, fn) );

      // always remove ALL stall site taints at exit
      rg.removeAllStallSiteTaints();

      // remove in-set var taints for the exiting rblock
      rg.removeInContextTaints(sese);

  case FKind.FlatCall: {

    // caller may be a method or a bristlecone-style task
    Descriptor mdCaller;
    if( fmContaining.getMethod() != null ) {
      mdCaller = fmContaining.getMethod();
      mdCaller = fmContaining.getTask();

    FlatCall fc = (FlatCall) fn;
    MethodDescriptor mdCallee = fc.getMethod();
    FlatMethod fmCallee = state.getMethodFlat(mdCallee);

    // all this jimma jamma to debug call sites is WELL WORTH the
    // effort, so many bugs or buggy info goes crazy through call
    boolean debugCallSite = false;
    if( state.DISJOINTDEBUGCALLEE != null &&
        state.DISJOINTDEBUGCALLER != null ) {

      boolean debugCalleeMatches = false;
      boolean debugCallerMatches = false;

      ClassDescriptor cdCallee = mdCallee.getClassDesc();
      if( cdCallee != null ) {
        debugCalleeMatches =
          state.DISJOINTDEBUGCALLEE.equals( cdCallee.getSymbol()+
                                            mdCallee.getSymbol()

      if( mdCaller instanceof MethodDescriptor ) {
        ClassDescriptor cdCaller = ((MethodDescriptor)mdCaller).getClassDesc();
        if( cdCaller != null ) {
          debugCallerMatches =
            state.DISJOINTDEBUGCALLER.equals( cdCaller.getSymbol()+
                                              mdCaller.getSymbol()

        // for bristlecone style tasks
        debugCallerMatches =
          state.DISJOINTDEBUGCALLER.equals( mdCaller.getSymbol() );

      debugCallSite = debugCalleeMatches && debugCallerMatches;

    boolean writeDebugDOTs = false;
    boolean stopAfter = false;
    if( debugCallSite ) {
      ++ReachGraph.debugCallSiteVisitCounter;
      System.out.println(" $$$ Debug call site visit "+
                         ReachGraph.debugCallSiteVisitCounter+

      // only capture within the configured visit window
      (ReachGraph.debugCallSiteVisitCounter >=
       ReachGraph.debugCallSiteVisitStartCapture) &&

      (ReachGraph.debugCallSiteVisitCounter <
       ReachGraph.debugCallSiteVisitStartCapture +
       ReachGraph.debugCallSiteNumVisitsToCapture)

        writeDebugDOTs = true;
        System.out.println(" $$$ Capturing this call site visit $$$");
        if( ReachGraph.debugCallSiteStopAfter &&
            (ReachGraph.debugCallSiteVisitCounter ==
             ReachGraph.debugCallSiteVisitStartCapture +
             ReachGraph.debugCallSiteNumVisitsToCapture - 1)

    // calculate the heap this call site can reach--note this is
    // not used for the current call site transform, we are
    // grabbing this heap model for future analysis of the callees,
    // so if different results emerge we will return to this site
    ReachGraph heapForThisCall_old =
      getIHMcontribution(mdCallee, fc);

    // the computation of the callee-reachable heap
    // is useful for making the callee starting point
    // and for applying the call site transfer function
    Set<Integer> callerNodeIDsCopiedToCallee =
      new HashSet<Integer>();

    ReachGraph heapForThisCall_cur =
      rg.makeCalleeView(fc,
                        callerNodeIDsCopiedToCallee,

    // enforce that a call site contribution can only
    // monotonically increase
    heapForThisCall_cur.merge(heapForThisCall_old);

    if( !heapForThisCall_cur.equals(heapForThisCall_old) ) {
      // if heap at call site changed, update the contribution,
      // and reschedule the callee for analysis
      addIHMcontribution(mdCallee, fc, heapForThisCall_cur);

      // map a FlatCall to its enclosing method/task descriptor
      // so we can write that info out later
      fc2enclosing.put(fc, mdCaller);

      if( state.DISJOINTDEBUGSCHEDULING ) {
        System.out.println(" context changed, scheduling callee: "+mdCallee);

      if( state.DISJOINTDVISITSTACKEESONTOP ) {
        calleesToEnqueue.add(mdCallee);

    // the transformation for a call site should update the
    // current heap abstraction with any effects from the callee,
    // or if the method is virtual, the effects from any possible
    // callees, so find the set of callees...
    Set<MethodDescriptor> setPossibleCallees;
    if( determinismDesired ) {
      // use an ordered set
      setPossibleCallees = new TreeSet<MethodDescriptor>(dComp);
      // otherwise use a speedy hashset
      setPossibleCallees = new HashSet<MethodDescriptor>();

    if( mdCallee.isStatic() ) {
      setPossibleCallees.add(mdCallee);
      TypeDescriptor typeDesc = fc.getThis().getType();
      setPossibleCallees.addAll(callGraph.getMethods(mdCallee,

    ReachGraph rgMergeOfPossibleCallers = new ReachGraph();

    Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
    while( mdItr.hasNext() ) {
      MethodDescriptor mdPossible = mdItr.next();
      FlatMethod fmPossible = state.getMethodFlat(mdPossible);

      addDependent(mdPossible, // callee

      // don't alter the working graph (rg) until we compute a
      // result for every possible callee, merge them all together,
      // then set rg to that
      ReachGraph rgPossibleCaller = new ReachGraph();
      rgPossibleCaller.merge(rg);

      ReachGraph rgPossibleCallee = getPartial(mdPossible);

      if( rgPossibleCallee == null ) {
        // if this method has never been analyzed just schedule it
        // for analysis and skip over this call site for now
        if( state.DISJOINTDVISITSTACKEESONTOP ) {
          calleesToEnqueue.add(mdPossible);
          enqueue(mdPossible);

        if( state.DISJOINTDEBUGSCHEDULING ) {
          System.out.println(" callee hasn't been analyzed, scheduling: "+mdPossible);

        // calculate the method call transform
        rgPossibleCaller.resolveMethodCall(fc,
                                           callerNodeIDsCopiedToCallee,

        if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
          // if( !rgPossibleCallee.isAccessible( ReachGraph.tdReturn ) ) {
          if( !accessible.isAccessible(fn, ReachGraph.tdReturn) ) {
            rgPossibleCaller.makeInaccessible(fc.getReturnTemp() );

      rgMergeOfPossibleCallers.merge(rgPossibleCaller);

      System.out.println("$$$ Exiting after requested captures of call site. $$$");

    // now that we've taken care of building heap models for
    // callee analysis, finish this transformation
    rg = rgMergeOfPossibleCallers;

    // jjenista: what is this? It breaks compilation
    // of programs with no tasks/SESEs/rblocks...
    //XXXXXXXXXXXXXXXXXXXXXXXXX
    //need to consider more
    FlatNode nextFN=fmCallee.getNext(0);
    if( nextFN instanceof FlatSESEEnterNode ) {
      FlatSESEEnterNode calleeSESE=(FlatSESEEnterNode)nextFN;
      if(!calleeSESE.getIsLeafSESE()) {
        rg.makeInaccessible(liveness.getLiveInTemps(fmContaining, fn) );

  case FKind.FlatReturnNode:
    FlatReturnNode frn = (FlatReturnNode) fn;
    rhs = frn.getReturnTemp();

    // before transfer, do effects analysis support
    if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
      // if(!rg.isAccessible(rhs)){
      if(!accessible.isAccessible(fn,rhs)) {
        rg.makeInaccessible(ReachGraph.tdReturn);

    if( rhs != null && shouldAnalysisTrack(rhs.getType() ) ) {
      rg.assignReturnEqualToTemp(rhs);

    setRetNodes.add(frn);

  // dead variables were removed before the above transfer function
  // was applied, so eliminate heap regions and edges that are no
  // longer part of the abstractly-live heap graph, and sweep up
  // and reachability effects that are altered by the reduction
  //rg.abstractGarbageCollect();

  // back edges are strictly monotonic
  if( pm.isBackEdge(fn) ) {
    ReachGraph rgPrevResult = mapBackEdgeToMonotone.get(fn);
    rg.merge(rgPrevResult);
    mapBackEdgeToMonotone.put(fn, rg);

  // at this point rg should be the correct update
  // by an above transfer function, or untouched if
  // the flat node type doesn't affect the heap
1760 // this method should generate integers strictly greater than zero!
1761 // special "shadow" regions are made from a heap region by negating
1763 static public Integer generateUniqueHeapRegionNodeID() {
1765 return new Integer(uniqueIDcount);
// Returns the pseudo-FieldDescriptor used to model the element slot of
// an array whose elements have type tdElement; one descriptor is
// created per element type and cached in mapTypeToArrayField.
static public FieldDescriptor getArrayField(TypeDescriptor tdElement) {
  FieldDescriptor fdElement = mapTypeToArrayField.get(tdElement);
  if( fdElement == null ) {
    // first request for this element type: fabricate a public field
    // named arrayElementFieldName and remember it for future lookups
    fdElement = new FieldDescriptor(new Modifiers(Modifiers.PUBLIC),
                                    arrayElementFieldName,
    mapTypeToArrayField.put(tdElement, fdElement);
// Dumps the final (fixed-point) reach graph of every analyzed
// method/task to a DOT file named COMPLETE<descriptor>
// (COMPLETEtask<descriptor> for bristlecone tasks).
private void writeFinalGraphs() {
  Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
  Iterator itr = entrySet.iterator();
  while( itr.hasNext() ) {
    Map.Entry me = (Map.Entry)itr.next();
    Descriptor d = (Descriptor) me.getKey();
    ReachGraph rg = (ReachGraph) me.getValue();

    // task results get a distinguishing name prefix
    if( d instanceof TaskDescriptor ) {
      graphName = "COMPLETEtask"+d;
      graphName = "COMPLETE"+d;

    rg.writeGraph(graphName,
                  true, // write labels (variables)
                  true, // selectively hide intermediate temp vars
                  true, // prune unreachable heap regions
                  true, // hide reachability altogether
                  true, // hide subset reachability states
                  true, // hide predicates
                  false); // hide edge taints
// Dumps every recorded initial-heap-model (IHM) contribution: for each
// callee descriptor, one DOT graph per contributing call site, named so
// the callee, the call site and its enclosing caller are identifiable.
private void writeFinalIHMs() {
  Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
  while( d2IHMsItr.hasNext() ) {
    Map.Entry me1 = (Map.Entry)d2IHMsItr.next();
    Descriptor d = (Descriptor) me1.getKey();
    Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>)me1.getValue();

    Iterator fc2rgItr = IHMs.entrySet().iterator();
    while( fc2rgItr.hasNext() ) {
      Map.Entry me2 = (Map.Entry)fc2rgItr.next();
      FlatCall fc = (FlatCall) me2.getKey();
      ReachGraph rg = (ReachGraph) me2.getValue();

      // fc2enclosing supplies the caller the site belongs to
      rg.writeGraph("IHMPARTFOR"+d+"FROM"+fc2enclosing.get(fc)+fc,
                    true, // write labels (variables)
                    true, // selectively hide intermediate temp vars
                    true, // hide reachability altogether
                    true, // prune unreachable heap regions
                    true, // hide subset reachability states
                    false, // hide predicates
                    true); // hide edge taints
// Dumps each method's/task's accumulated initial analysis context
// (its merged incoming heap model) to a DOT file named INITIAL<descriptor>.
private void writeInitialContexts() {
  Set entrySet = mapDescriptorToInitialContext.entrySet();
  Iterator itr = entrySet.iterator();
  while( itr.hasNext() ) {
    Map.Entry me = (Map.Entry)itr.next();
    Descriptor d = (Descriptor) me.getKey();
    ReachGraph rg = (ReachGraph) me.getValue();

    rg.writeGraph("INITIAL"+d,
                  true, // write labels (variables)
                  true, // selectively hide intermediate temp vars
                  true, // prune unreachable heap regions
                  false, // hide all reachability
                  true, // hide subset reachability states
                  true, // hide predicates
                  false); // hide edge taints
// Fetches the current complete (fixed-point-so-far) reach graph for a
// method or task descriptor; null if it has not been analyzed yet.
protected ReachGraph getPartial(Descriptor d) {
  return mapDescriptorToCompleteReachGraph.get(d);
// Records rg as the current complete result for descriptor d; when the
// writeAllIncrementalDOTs flag is on, also dumps this partial result to
// a DOT file whose name carries a per-descriptor update counter.
protected void setPartial(Descriptor d, ReachGraph rg) {
  mapDescriptorToCompleteReachGraph.put(d, rg);

  // when the flag for writing out every partial
  // result is set, we should spit out the graph,
  // but in order to give it a unique name we need
  // to track how many partial results for this
  // descriptor we've already written out
  if( writeAllIncrementalDOTs ) {
    if( !mapDescriptorToNumUpdates.containsKey(d) ) {
      mapDescriptorToNumUpdates.put(d, new Integer(0) );
    Integer n = mapDescriptorToNumUpdates.get(d);

    // zero-padded counter keeps files sorted in creation order
    if( d instanceof TaskDescriptor ) {
      graphName = d+"COMPLETEtask"+String.format("%05d", n);
      graphName = d+"COMPLETE"+String.format("%05d", n);

    rg.writeGraph(graphName,
                  true, // write labels (variables)
                  true, // selectively hide intermediate temp vars
                  true, // prune unreachable heap regions
                  false, // hide all reachability
                  true, // hide subset reachability states
                  false, // hide predicates
                  false); // hide edge taints

    mapDescriptorToNumUpdates.put(d, n + 1);
// return just the allocation site associated with one FlatNew node,
// creating and registering it on first request (results cached in
// mapFlatNewToAllocSite; region IDs registered in mapHrnIdToAllocSite)
protected AllocSite getAllocSiteFromFlatNewPRIVATE(FlatNew fnew) {

  // a site listed in sitesToFlag is treated as flagged even when the
  // source program did not flag it explicitly
  boolean flagProgrammatically = false;
  if( sitesToFlag != null && sitesToFlag.contains(fnew) ) {
    flagProgrammatically = true;

  if( !mapFlatNewToAllocSite.containsKey(fnew) ) {
    AllocSite as = AllocSite.factory(allocationDepth,
                                     fnew.getDisjointId(),
                                     flagProgrammatically

    // the newest nodes are single objects
    for( int i = 0; i < allocationDepth; ++i ) {
      Integer id = generateUniqueHeapRegionNodeID();
      as.setIthOldest(i, id);
      mapHrnIdToAllocSite.put(id, as);

    // the oldest node is a summary node
    as.setSummary(generateUniqueHeapRegionNodeID() );

    mapFlatNewToAllocSite.put(fnew, as);

  return mapFlatNewToAllocSite.get(fnew);
1928 public static boolean shouldAnalysisTrack(TypeDescriptor type) {
1929 // don't track primitive types, but an array
1930 // of primitives is heap memory
1931 if( type.isImmutable() ) {
1932 return type.isArray();
1935 // everything else is an object
// Number of method/task descriptors the interprocedural pass tracks.
protected int numMethodsAnalyzed() {
  return descriptorsToAnalyze.size();
// Take in source entry which is the program's compiled entry and
// create a new analysis entry, a method that takes no parameters
// and appears to allocate the command line arguments and call the
// source entry with them. The purpose of this analysis entry is
// to provide a top-level method context with no parameters left.
protected void makeAnalysisEntryMethod(MethodDescriptor mdSourceEntry) {

  // fabricated entry is public static void
  Modifiers mods = new Modifiers();
  mods.addModifier(Modifiers.PUBLIC);
  mods.addModifier(Modifiers.STATIC);

  TypeDescriptor returnType =
    new TypeDescriptor(TypeDescriptor.VOID);

  this.mdAnalysisEntry =
    new MethodDescriptor(mods,
                         "analysisEntryMethod"

  // model an allocation of the args array handed to the real entry
  TempDescriptor cmdLineArgs =
    new TempDescriptor("args",
                       mdSourceEntry.getParamType(0)

    new FlatNew(mdSourceEntry.getParamType(0),

  // pass the fabricated args as the single argument
  TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
  sourceEntryArgs[0] = cmdLineArgs;

    new FlatCall(mdSourceEntry,

  FlatReturnNode frn = new FlatReturnNode(null);

  FlatExit fe = new FlatExit();

  this.fmAnalysisEntry =
    new FlatMethod(mdAnalysisEntry,

  this.fmAnalysisEntry.addNext(fn);
// Topologically sorts the given descriptors via DFS over the call
// graph so dependents come out in a good analysis order; a TreeSet is
// used for the discovered set when deterministic iteration is wanted.
protected LinkedList<Descriptor> topologicalSort(Set<Descriptor> toSort) {

  Set<Descriptor> discovered;

  if( determinismDesired ) {
    // use an ordered set
    discovered = new TreeSet<Descriptor>(dComp);
    // otherwise use a speedy hashset
    discovered = new HashSet<Descriptor>();

  LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();

  Iterator<Descriptor> itr = toSort.iterator();
  while( itr.hasNext() ) {
    Descriptor d = itr.next();

    // start a DFS from every not-yet-discovered root
    if( !discovered.contains(d) ) {
      dfsVisit(d, toSort, sorted, discovered);
// While we're doing DFS on call graph, remember
// dependencies for efficient queuing of methods
// during interprocedural analysis:
//
// a dependent of a method decriptor d for this analysis is:
// 1) a method or task that invokes d
// 2) in the descriptorsToAnalyze set
protected void dfsVisit(Descriptor d,
                        Set<Descriptor> toSort,
                        LinkedList<Descriptor> sorted,
                        Set<Descriptor> discovered) {

  // only methods have callers, tasks never do
  if( d instanceof MethodDescriptor ) {

    MethodDescriptor md = (MethodDescriptor) d;

    // the call graph is not aware that we have a fabricated
    // analysis entry that calls the program source's entry
    if( md == mdSourceEntry ) {
      if( !discovered.contains(mdAnalysisEntry) ) {
        addDependent(mdSourceEntry, // callee
                     mdAnalysisEntry // caller
        dfsVisit(mdAnalysisEntry, toSort, sorted, discovered);

    // otherwise call graph guides DFS
    Iterator itr = callGraph.getCallerSet(md).iterator();
    while( itr.hasNext() ) {
      Descriptor dCaller = (Descriptor) itr.next();

      // only consider callers in the original set to analyze
      if( !toSort.contains(dCaller) ) {

      if( !discovered.contains(dCaller) ) {
        addDependent(md, // callee

        dfsVisit(dCaller, toSort, sorted, discovered);

  // for leaf-nodes last now!
// Schedules descriptor d for (re)analysis unless it is already
// pending; honors the configured worklist discipline (visit stack vs
// priority queue) selected by the DISJOINT* state flags.
protected void enqueue(Descriptor d) {

  if( !descriptorsToVisitSet.contains(d) ) {

    if( state.DISJOINTDVISITSTACK ||
        state.DISJOINTDVISITSTACKEESONTOP
      descriptorsToVisitStack.add(d);

    } else if( state.DISJOINTDVISITPQUE ) {
      // priority was assigned from the topological order
      Integer priority = mapDescriptorToPriority.get(d);
      descriptorsToVisitQ.add(new DescriptorQWrapper(priority,

    // membership set mirrors the worklist to avoid duplicates
    descriptorsToVisitSet.add(d);
// a dependent of a method decriptor d for this analysis is:
// 1) a method or task that invokes d
// 2) in the descriptorsToAnalyze set
// Registers caller as a dependent of callee so callers are
// rescheduled whenever the callee's analysis result grows.
protected void addDependent(Descriptor callee, Descriptor caller) {
  Set<Descriptor> deps = mapDescriptorToSetDependents.get(callee);
  if( deps == null ) {
    // lazily create the dependent set on first use
    deps = new HashSet<Descriptor>();
  mapDescriptorToSetDependents.put(callee, deps);
// Returns the set of callers depending on the given callee's result;
// an empty set is created and installed on demand, so the lookup
// never yields null.
protected Set<Descriptor> getDependents(Descriptor callee) {
  Set<Descriptor> deps = mapDescriptorToSetDependents.get(callee);
  if( deps == null ) {
    deps = new HashSet<Descriptor>();
    mapDescriptorToSetDependents.put(callee, deps);
// Returns the table of initial-heap-model contributions (one reach
// graph per contributing call site) for descriptor d, creating and
// installing an empty table on first request.
public Hashtable<FlatCall, ReachGraph> getIHMcontributions(Descriptor d) {

  Hashtable<FlatCall, ReachGraph> heapsFromCallers =
    mapDescriptorToIHMcontributions.get(d);

  if( heapsFromCallers == null ) {
    heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
    mapDescriptorToIHMcontributions.put(d, heapsFromCallers);

  return heapsFromCallers;
// Returns the initial-heap-model contribution of one specific call
// site to descriptor d.
// NOTE(review): the body of the containsKey branch (handling a call
// site with no recorded contribution) is not visible in this listing.
public ReachGraph getIHMcontribution(Descriptor d,

  Hashtable<FlatCall, ReachGraph> heapsFromCallers =
    getIHMcontributions(d);

  if( !heapsFromCallers.containsKey(fc) ) {

  return heapsFromCallers.get(fc);
// Records (overwriting any previous entry) a call site's reach-graph
// contribution to descriptor d's initial heap model.
public void addIHMcontribution(Descriptor d,

  Hashtable<FlatCall, ReachGraph> heapsFromCallers =
    getIHMcontributions(d);

  heapsFromCallers.put(fc, rg);
// Fabricates an allocation site standing in for the object a
// parameter temp refers to: a FlatNew labeled "param<temp>" when the
// site should carry a disjoint ID, otherwise an anonymous one; heap
// region IDs are generated and registered like a real site.
private AllocSite createParameterAllocSite(ReachGraph rg,
                                           TempDescriptor tempDesc,

    flatNew = new FlatNew(tempDesc.getType(), // type
                          tempDesc, // param temp
                          false, // global alloc?
                          "param"+tempDesc // disjoint site ID string

    flatNew = new FlatNew(tempDesc.getType(), // type
                          tempDesc, // param temp
                          false, // global alloc?
                          null // disjoint site ID string

  // create allocation site
  AllocSite as = AllocSite.factory(allocationDepth,
                                   flatNew.getDisjointId(),

  // the newest nodes model single objects
  for (int i = 0; i < allocationDepth; ++i) {
    Integer id = generateUniqueHeapRegionNodeID();
    as.setIthOldest(i, id);
    mapHrnIdToAllocSite.put(id, as);

  // the oldest node is a summary node
  as.setSummary(generateUniqueHeapRegionNodeID() );
// Collects the fields of typeDesc whose types the analysis must model
// (per shouldAnalysisTrack); immutable types contribute no fields.
private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc) {

  Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
  if(!typeDesc.isImmutable()) {
    ClassDescriptor classDesc = typeDesc.getClassDesc();
    for (Iterator it = classDesc.getFields(); it.hasNext(); ) {
      FieldDescriptor field = (FieldDescriptor) it.next();
      TypeDescriptor fieldType = field.getType();
      // keep only heap-relevant (tracked) fields
      if (shouldAnalysisTrack(fieldType)) {
        fieldSet.add(field);
// Builds the chain of summary heap region nodes modeling a
// multi-dimensional array field: one summary node per dimension,
// linked outer-to-inner by element edges, plus (when the element
// class has tracked fields) a final node for the element objects.
// Returns the entry (outermost) node and records entry->innermost in
// 'map'; per-type nodes are shared through 'mapToExistingNode'.
private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha) {

  int dimCount=fd.getType().getArrayCount();
  HeapRegionNode prevNode=null;
  HeapRegionNode arrayEntryNode=null;
  // walk from the outermost dimension down to 1
  for(int i=dimCount; i>0; i--) {
    TypeDescriptor typeDesc=fd.getType().dereference();    //hack to get instance of type desc
    typeDesc.setArrayCount(i);
    TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
    HeapRegionNode hrnSummary;
    if(!mapToExistingNode.containsKey(typeDesc)) {
      as = createParameterAllocSite(rg, tempDesc, false);
      // make a new reference to allocated node
        rg.createNewHeapRegionNode(as.getSummary(),  // id or null to generate a new one
                                   false,  // single object?
                                   false,  // out-of-context?
                                   as.getType(),  // type
                                   as,  // allocation site
                                   alpha,  // inherent reach
                                   alpha,  // current reach
                                   ExistPredSet.factory(rg.predTrue),  // predicates
                                   tempDesc.toString()  // description
      rg.id2hrn.put(as.getSummary(),hrnSummary);

      mapToExistingNode.put(typeDesc, hrnSummary);
      // reuse the summary node already built for this dimension type
      hrnSummary=mapToExistingNode.get(typeDesc);

    if(prevNode==null) {
      // make a new reference between new summary node and source
      RefEdge edgeToSummary = new RefEdge(srcHRN,  // source
                                          fd.getSymbol(),  // field name
                                          ExistPredSet.factory(rg.predTrue),  // predicates

      rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
      prevNode=hrnSummary;
      arrayEntryNode=hrnSummary;
      // make a new reference between summary nodes of array
      RefEdge edgeToSummary = new RefEdge(prevNode,  // source
                                          arrayElementFieldName,  // field name
                                          ExistPredSet.factory(rg.predTrue),  // predicates

      rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
      prevNode=hrnSummary;

  // create a new obj node if obj has at least one non-primitive field
  TypeDescriptor type=fd.getType();
  if(getFieldSetTobeAnalyzed(type).size()>0) {
    TypeDescriptor typeDesc=type.dereference();
    typeDesc.setArrayCount(0);
    if(!mapToExistingNode.containsKey(typeDesc)) {
      TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
      AllocSite as = createParameterAllocSite(rg, tempDesc, false);
      // make a new reference to allocated node
      HeapRegionNode hrnSummary =
        rg.createNewHeapRegionNode(as.getSummary(),  // id or null to generate a new one
                                   false,  // single object?
                                   false,  // out-of-context?
                                   as,  // allocation site
                                   alpha,  // inherent reach
                                   alpha,  // current reach
                                   ExistPredSet.factory(rg.predTrue),  // predicates
                                   tempDesc.toString()  // description
      rg.id2hrn.put(as.getSummary(),hrnSummary);
      mapToExistingNode.put(typeDesc, hrnSummary);
      RefEdge edgeToSummary = new RefEdge(prevNode,  // source
                                          arrayElementFieldName,  // field name
                                          ExistPredSet.factory(rg.predTrue),  // predicates
      rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
      prevNode=hrnSummary;
      HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
      // only add the element edge once per (node, type, field) triple
      if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null) {
        RefEdge edgeToSummary = new RefEdge(prevNode,  // source
                                            arrayElementFieldName,  // field name
                                            ExistPredSet.factory(rg.predTrue),  // predicates
        rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
      prevNode=hrnSummary;

  // remember entry -> innermost so callers can extend the chain
  map.put(arrayEntryNode, prevNode);
  return arrayEntryNode;
/**
 * Builds the initial reach graph for a task entry point.  For each task
 * parameter: create a flagged parameter allocation site plus a variable
 * node and wire them with a reference edge; then walk the fields
 * transitively reachable from the parameter's type with a work set,
 * materializing one summary heap region node per distinct field type and
 * linking source nodes to those summaries with field edges.
 *
 * NOTE(review): this excerpt elides some lines (constructor arguments,
 * else-branches, closing braces); comments below describe only the
 * visible code.
 */
private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
  ReachGraph rg = new ReachGraph();
  TaskDescriptor taskDesc = fm.getTask();

  // process each task parameter independently
  for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
    Descriptor paramDesc = taskDesc.getParameter(idx);
    TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);

    // setup data structure:
    //   workSet  -- pending work items; each item is a singleton map
    //               holding one (source node, field) pair to expand
    //   mapTypeToExistingSummaryNode -- reuse one summary node per type
    //   mapToFirstDimensionArrayNode -- array summary -> element node
    //   doneSet  -- "<nodeID>_<field>" keys already expanded
    Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
      new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
    Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
      new Hashtable<TypeDescriptor, HeapRegionNode>();
    Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
      new Hashtable<HeapRegionNode, HeapRegionNode>();
    Set<String> doneSet = new HashSet<String>();

    TempDescriptor tempDesc = fm.getParameter(idx);

    // flagged (true) allocation site standing in for this parameter
    AllocSite as = createParameterAllocSite(rg, tempDesc, true);
    VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
    Integer idNewest = as.getIthOldest(0);
    HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);

    // make a new reference to allocated node
    RefEdge edgeNew = new RefEdge(lnX, // source
                                  taskDesc.getParamType(idx), // type
                                  hrnNewest.getAlpha(), // beta
                                  ExistPredSet.factory(rg.predTrue), // predicates

    rg.addRefEdge(lnX, hrnNewest, edgeNew);

    // set-up a work set for class field: seed one work item per
    // tracked (reference-typed) field of the parameter's class
    ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
    for (Iterator it = classDesc.getFields(); it.hasNext(); ) {
      FieldDescriptor fd = (FieldDescriptor) it.next();
      TypeDescriptor fieldType = fd.getType();
      if (shouldAnalysisTrack(fieldType)) {
        HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
        newMap.put(hrnNewest, fd);
        workSet.add(newMap);

    // used to give each generated summary temp a unique name
    int uniqueIdentifier = 0;

    // drain the work set, expanding one (node, field) pair at a time
    while (!workSet.isEmpty()) {
      HashMap<HeapRegionNode, FieldDescriptor> map = workSet
      workSet.remove(map);

      // each work item is a singleton map: unpack its only entry
      Set<HeapRegionNode> key = map.keySet();
      HeapRegionNode srcHRN = key.iterator().next();
      FieldDescriptor fd = map.get(srcHRN);
      TypeDescriptor type = fd.getType();
      String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;

      if (!doneSet.contains(doneSetIdentifier)) {
        doneSet.add(doneSetIdentifier);
        if (!mapTypeToExistingSummaryNode.containsKey(type)) {
          // create new summary Node
          TempDescriptor td = new TempDescriptor("temp"
                                                 + uniqueIdentifier, type);

          AllocSite allocSite;
          if(type.equals(paramTypeDesc)) {
            //corresponding allocsite has already been created for a parameter variable.
            // (non-parameter case: unflagged site)
            allocSite = createParameterAllocSite(rg, td, false);

          String strDesc = allocSite.toStringForDOT()
          TypeDescriptor allocType=allocSite.getType();

          HeapRegionNode hrnSummary;
          if(allocType.isArray() && allocType.getArrayCount()>0) {
            // multi-dimensional arrays get a dedicated chain of nodes
            hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
            rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
                                       false, // single object?
                                       false, // out-of-context?
                                       allocSite.getType(), // type
                                       allocSite, // allocation site
                                       hrnNewest.getAlpha(), // inherent reach
                                       hrnNewest.getAlpha(), // current reach
                                       ExistPredSet.factory(rg.predTrue), // predicates
                                       strDesc // description
          // register the summary node under its site's summary id
          rg.id2hrn.put(allocSite.getSummary(),hrnSummary);

          // make a new reference to summary node
          RefEdge edgeToSummary = new RefEdge(srcHRN, // source
                                              fd.getSymbol(), // field name
                                              hrnNewest.getAlpha(), // beta
                                              ExistPredSet.factory(rg.predTrue), // predicates

          rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);

          // remember summary node so later fields of the same type reuse it
          mapTypeToExistingSummaryNode.put(type, hrnSummary);

          // set-up a work set for fields of the class
          Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
          for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
            FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator

            // for arrays, expand fields from the element node, not the
            // array summary itself
            HeapRegionNode newDstHRN;
            if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)) {
              // the related heap region node already exists
              newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
              newDstHRN=hrnSummary;

            doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
            if(!doneSet.contains(doneSetIdentifier)) {
              // add new work item
              HashMap<HeapRegionNode, FieldDescriptor> newMap =
                new HashMap<HeapRegionNode, FieldDescriptor>();
              newMap.put(newDstHRN, fieldDescriptor);
              workSet.add(newMap);

          // if there exists corresponding summary node
          // just add an edge to it instead of creating a new node
          HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);

          RefEdge edgeToSummary = new RefEdge(srcHRN, // source
                                              fd.getType(), // type
                                              fd.getSymbol(), // field name
                                              srcHRN.getAlpha(), // beta
                                              ExistPredSet.factory(rg.predTrue), // predicates

          rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
// return all allocation sites in the method (there is one allocation
// site per FlatNew node in a method)
private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
  // lazily build and cache the per-descriptor allocation site set
  if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
    buildAllocationSiteSet(d);

  return mapDescriptorToAllocSiteSet.get(d);
/**
 * Populates mapDescriptorToAllocSiteSet for {@code d} by traversing the
 * descriptor's flat IR and collecting an AllocSite for every FlatNew node.
 * {@code d} must be either a MethodDescriptor or a TaskDescriptor.
 */
private void buildAllocationSiteSet(Descriptor d) {
  HashSet<AllocSite> s = new HashSet<AllocSite>();

  // resolve the flat IR for either kind of descriptor
  // (NOTE(review): fm is presumably declared on a line elided from this
  // excerpt)
  if( d instanceof MethodDescriptor ) {
    fm = state.getMethodFlat( (MethodDescriptor) d);
    assert d instanceof TaskDescriptor;
    fm = state.getMethodFlat( (TaskDescriptor) d);

  pm.analyzeMethod(fm);

  // visit every node in this FlatMethod's IR graph
  // and make a set of the allocation sites from the
  // FlatNew node's visited
  HashSet<FlatNode> visited = new HashSet<FlatNode>();
  HashSet<FlatNode> toVisit = new HashSet<FlatNode>();

  // standard worklist traversal over the control-flow graph
  while( !toVisit.isEmpty() ) {
    FlatNode n = toVisit.iterator().next();

    if( n instanceof FlatNew ) {
      s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );

    // enqueue unvisited successors
    for( int i = 0; i < pm.numNext(n); ++i ) {
      FlatNode child = pm.getNext(n, i);
      if( !visited.contains(child) ) {

  // cache the result for getAllocationSiteSet()
  mapDescriptorToAllocSiteSet.put(d, s);
/**
 * Collects the flagged allocation sites (those carrying a non-null
 * disjoint-analysis id) found in {@code dIn} and, via the call graph,
 * in every method transitively callable from it.
 */
private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {

  HashSet<AllocSite> out = new HashSet<AllocSite>();
  HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
  HashSet<Descriptor> visited = new HashSet<Descriptor>();

  // worklist traversal over the call graph starting at dIn
  while (!toVisit.isEmpty()) {
    Descriptor d = toVisit.iterator().next();

    // scan this descriptor's allocation sites for flagged ones
    HashSet<AllocSite> asSet = getAllocationSiteSet(d);
    Iterator asItr = asSet.iterator();
    while (asItr.hasNext()) {
      AllocSite as = (AllocSite) asItr.next();
      if (as.getDisjointAnalysisId() != null) {

    // enqueue callees of this method to be searched for
    // allocation sites also
    Set callees = callGraph.getCalleeSet(d);
    if (callees != null) {
      Iterator methItr = callees.iterator();
      while (methItr.hasNext()) {
        MethodDescriptor md = (MethodDescriptor) methItr.next();

        if (!visited.contains(md)) {
/**
 * Collects every allocation site whose class is flagged that is
 * reachable from task {@code td}: the task itself plus all methods
 * transitively callable from it are scanned.
 */
private HashSet<AllocSite>
getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {

  HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
  HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
  HashSet<Descriptor> visited = new HashSet<Descriptor>();

  // traverse this task and all methods reachable from this task
  while( !toVisit.isEmpty() ) {
    Descriptor d = toVisit.iterator().next();

    // keep sites whose allocated class carries flags
    HashSet<AllocSite> asSet = getAllocationSiteSet(d);
    Iterator asItr = asSet.iterator();
    while( asItr.hasNext() ) {
      AllocSite as = (AllocSite) asItr.next();
      TypeDescriptor typed = as.getType();
      if( typed != null ) {
        ClassDescriptor cd = typed.getClassDesc();
        if( cd != null && cd.hasFlags() ) {

    // enqueue callees of this method to be searched for
    // allocation sites also
    Set callees = callGraph.getCalleeSet(d);
    if( callees != null ) {
      Iterator methItr = callees.iterator();
      while( methItr.hasNext() ) {
        MethodDescriptor md = (MethodDescriptor) methItr.next();

        if( !visited.contains(md) ) {
/** Returns the set of method/task descriptors this analysis covers. */
public Set<Descriptor> getDescriptorsToAnalyze() {
  return descriptorsToAnalyze;
/** Returns the effects analysis companion object, if one was configured. */
public EffectsAnalysis getEffectsAnalysis() {
  return effectsAnalysis;
/** Returns the completed reach graph for descriptor {@code d}, or null if none. */
public ReachGraph getReachGraph(Descriptor d) {
  return mapDescriptorToCompleteReachGraph.get(d);
/** Returns the reach graph captured at the entry of flat node {@code fn}, or null. */
public ReachGraph getEnterReachGraph(FlatNode fn) {
  return fn2rgAtEnter.get(fn);
// get successive captures of the analysis state, use compiler
// debug-snapshot state (consumed by debugSnapshot() below):
boolean takeDebugSnapshots = false;   // master switch for snapshot capture
String descSymbolDebug = null;        // only snapshot the method with this symbol
boolean stopAfterCapture = false;     // halt the run once the capture window ends
int snapVisitCounter = 0;             // how many node visits have occurred
int snapNodeCounter = 0;              // per-visit node counter used in file names
int visitStartCapture = 0;            // first visit index to capture
int numVisitsToCapture = 0;           // size of the capture window
2665 void debugSnapshot(ReachGraph rg, FlatNode fn, boolean in) {
2666 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
2674 if( snapVisitCounter >= visitStartCapture ) {
2675 System.out.println(" @@@ snapping visit="+snapVisitCounter+
2676 ", node="+snapNodeCounter+
2680 graphName = String.format("snap%03d_%04din",
2684 graphName = String.format("snap%03d_%04dout",
2689 graphName = graphName + fn;
2691 rg.writeGraph(graphName,
2692 true, // write labels (variables)
2693 true, // selectively hide intermediate temp vars
2694 true, // prune unreachable heap regions
2695 false, // hide reachability
2696 false, // hide subset reachability states
2697 true, // hide predicates
2698 true); // hide edge taints