1 package Analysis.Disjoint;
3 import Analysis.CallGraph.*;
4 import Analysis.Liveness;
5 import Analysis.ArrayReferencees;
6 import Analysis.OoOJava.Accessible;
7 import Analysis.OoOJava.RBlockRelationAnalysis;
8 import Analysis.FlatIRGraph.*;
11 import IR.Tree.Modifiers;
16 public class DisjointAnalysis implements HeapAnalysis {
19 ///////////////////////////////////////////
21 // Public interface to discover possible
22 // sharing in the program under analysis
24 ///////////////////////////////////////////
26 // if an object allocated at the target site may be
27 // reachable from both an object allocated at root1 and an
28 // object allocated at root2, return TRUE
29 public boolean mayBothReachTarget(FlatMethod fm,
// NOTE(review): the FlatNew parameters (fnRoot1, fnRoot2, fnTarget) are on
// lines elided from this listing -- confirm against the full source.
// map each root allocation statement to its analysis abstraction; the
// asserts require that both roots were flagged for tracking
34 AllocSite asr1 = getAllocationSiteFromFlatNew(fnRoot1);
35 AllocSite asr2 = getAllocationSiteFromFlatNew(fnRoot2);
36 assert asr1.isFlagged();
37 assert asr2.isFlagged();
39 AllocSite ast = getAllocationSiteFromFlatNew(fnTarget);
// query the reach graph computed for the enclosing method
40 ReachGraph rg = getPartial(fm.getMethod() );
42 return rg.mayBothReachTarget(asr1, asr2, ast);
45 // similar to the method above, return TRUE if ever
46 // more than one object from the root allocation site
47 // may reach an object from the target site
48 public boolean mayManyReachTarget(FlatMethod fm,
// NOTE(review): FlatNew parameters (fnRoot, fnTarget) are on lines elided
// from this listing -- confirm against the full source.
52 AllocSite asr = getAllocationSiteFromFlatNew(fnRoot);
// the root must have been flagged for the analysis to track it
53 assert asr.isFlagged();
55 AllocSite ast = getAllocationSiteFromFlatNew(fnTarget);
56 ReachGraph rg = getPartial(fm.getMethod() );
58 return rg.mayManyReachTarget(asr, ast);
// all flagged allocation sites transitively reachable from task td
64 public HashSet<AllocSite>
65 getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
// public interface may only be used after the fixed point is reached
66 checkAnalysisComplete();
67 return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
// public accessor for the analysis abstraction (AllocSite) of an
// allocation statement; delegates to the PRIVATE lookup/creation helper
70 public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
71 checkAnalysisComplete();
72 return getAllocSiteFromFlatNewPRIVATE(fn);
// look up the allocation site that created a heap region node by the
// node's intergraph ID; returns null when the ID is unknown
75 public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
76 checkAnalysisComplete();
77 return mapHrnIdToAllocSite.get(id);
// heap region nodes possibly shared between two parameters of a
// task/method; empty set means no sharing detected.
// NOTE(review): paramIndex1/paramIndex2 parameters are on lines elided
// from this listing -- confirm against the full source.
80 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
83 checkAnalysisComplete();
84 ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
85 FlatMethod fm=state.getMethodFlat(taskOrMethod);
87 return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
// heap region nodes possibly shared between parameter paramIndex and
// allocation site alloc; empty set means no sharing detected
90 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
91 int paramIndex, AllocSite alloc) {
92 checkAnalysisComplete();
93 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
94 FlatMethod fm=state.getMethodFlat(taskOrMethod);
96 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// symmetric convenience overload of the (paramIndex, alloc) query above:
// callers may pass arguments in either order; the forward to
// mayReachSharedObjects deliberately uses the canonical order
99 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
100 AllocSite alloc, int paramIndex) {
101 checkAnalysisComplete();
102 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
103 FlatMethod fm=state.getMethodFlat(taskOrMethod);
105 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// heap region nodes possibly shared between two allocation sites in the
// context of the given task/method; empty set means no sharing detected
108 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
109 AllocSite alloc1, AllocSite alloc2) {
110 checkAnalysisComplete();
111 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
113 return rg.mayReachSharedObjects(alloc1, alloc2);
// human-readable dump of a set of heap region nodes, one node per line;
// nodes that have an allocation site also get the site's verbose string
116 public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
117 checkAnalysisComplete();
// NOTE(review): the declaration of the accumulator 'out' is on a line
// elided from this listing
121 Iterator<HeapRegionNode> i = s.iterator();
122 while (i.hasNext()) {
123 HeapRegionNode n = i.next();
125 AllocSite as = n.getAllocSite();
// branch on whether the node has an alloc site (condition elided here)
127 out += " " + n.toString() + ",\n";
129 out += " " + n.toString() + ": " + as.toStringVerbose()
138 // use the methods given above to check every possible sharing class
139 // between task parameters and flagged allocation sites reachable
// Writes a sharing report for every task: parameter-vs-parameter,
// parameter-vs-site, and site-vs-site queries.  tabularOutput switches
// between a prose report and a LaTeX-style table row.
// NOTE(review): further parameters (timeReport, justTime, numLines) are
// on lines elided from this listing -- confirm against the full source.
141 public void writeAllSharing(String outputFile,
144 boolean tabularOutput,
147 throws java.io.IOException {
148 checkAnalysisComplete();
150 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
152 if (!tabularOutput) {
153 bw.write("Conducting ownership analysis with allocation depth = "
154 + allocationDepth + "\n");
155 bw.write(timeReport + "\n");
160 // look through every task for potential sharing
161 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
162 while (taskItr.hasNext()) {
163 TaskDescriptor td = (TaskDescriptor) taskItr.next();
165 if (!tabularOutput) {
166 bw.write("\n---------" + td + "--------\n");
169 HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);
171 Set<HeapRegionNode> common;
173 // for each task parameter, check for sharing classes with
174 // other task parameters and every allocation site
175 // reachable from this task
176 boolean foundSomeSharing = false;
178 FlatMethod fm = state.getMethodFlat(td);
179 for (int i = 0; i < fm.numParameters(); ++i) {
181 // skip parameters with types that cannot reference
183 if( !shouldAnalysisTrack(fm.getParameter(i).getType() ) ) {
187 // for the ith parameter check for sharing classes to all
188 // higher numbered parameters
189 for (int j = i + 1; j < fm.numParameters(); ++j) {
191 // skip parameters with types that cannot reference
193 if( !shouldAnalysisTrack(fm.getParameter(j).getType() ) ) {
198 common = hasPotentialSharing(td, i, j);
199 if (!common.isEmpty()) {
200 foundSomeSharing = true;
202 if (!tabularOutput) {
203 bw.write("Potential sharing between parameters " + i
204 + " and " + j + ".\n");
205 bw.write(prettyPrintNodeSet(common) + "\n");
210 // for the ith parameter, check for sharing classes against
211 // the set of allocation sites reachable from this
213 Iterator allocItr = allocSites.iterator();
214 while (allocItr.hasNext()) {
215 AllocSite as = (AllocSite) allocItr.next();
216 common = hasPotentialSharing(td, i, as);
217 if (!common.isEmpty()) {
218 foundSomeSharing = true;
220 if (!tabularOutput) {
221 bw.write("Potential sharing between parameter " + i
222 + " and " + as.getFlatNew() + ".\n");
223 bw.write(prettyPrintNodeSet(common) + "\n");
229 // for each allocation site check for sharing classes with
230 // other allocation sites in the context of execution
// outerChecked avoids reporting each unordered pair of sites twice
232 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
233 Iterator allocItr1 = allocSites.iterator();
234 while (allocItr1.hasNext()) {
235 AllocSite as1 = (AllocSite) allocItr1.next();
237 Iterator allocItr2 = allocSites.iterator();
238 while (allocItr2.hasNext()) {
239 AllocSite as2 = (AllocSite) allocItr2.next();
241 if (!outerChecked.contains(as2)) {
242 common = hasPotentialSharing(td, as1, as2);
244 if (!common.isEmpty()) {
245 foundSomeSharing = true;
247 if (!tabularOutput) {
248 bw.write("Potential sharing between "
249 + as1.getFlatNew() + " and "
250 + as2.getFlatNew() + ".\n");
251 bw.write(prettyPrintNodeSet(common) + "\n");
257 outerChecked.add(as1);
260 if (!foundSomeSharing) {
261 if (!tabularOutput) {
262 bw.write("No sharing between flagged objects in Task " + td
// tabular mode: emit one LaTeX table row of summary statistics
270 bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
271 + " & " + numMethodsAnalyzed() + " \\\\\n");
273 bw.write("\nNumber sharing classes: "+numSharing);
281 // this version of writeAllSharing is for Java programs that have no tasks
282 // ***********************************
283 // WARNING: THIS DOES NOT DO THE RIGHT THING, REPORTS 0 ALWAYS!
284 // It should use mayBothReachTarget and mayManyReachTarget like
285 // OoOJava does to query analysis results
286 // ***********************************
287 public void writeAllSharingJava(String outputFile,
// NOTE(review): additional parameters (timeReport, etc.) are on lines
// elided from this listing -- confirm against the full source.
290 boolean tabularOutput,
293 throws java.io.IOException {
294 checkAnalysisComplete();
300 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
302 bw.write("Conducting disjoint reachability analysis with allocation depth = "
303 + allocationDepth + "\n");
304 bw.write(timeReport + "\n\n");
306 boolean foundSomeSharing = false;
// in task-free (Java) mode only the main method's result is queried
308 Descriptor d = typeUtil.getMain();
309 HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);
311 // for each allocation site check for sharing classes with
312 // other allocation sites in the context of execution
// outerChecked avoids reporting each unordered pair of sites twice
314 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
315 Iterator allocItr1 = allocSites.iterator();
316 while (allocItr1.hasNext()) {
317 AllocSite as1 = (AllocSite) allocItr1.next();
319 Iterator allocItr2 = allocSites.iterator();
320 while (allocItr2.hasNext()) {
321 AllocSite as2 = (AllocSite) allocItr2.next();
323 if (!outerChecked.contains(as2)) {
324 Set<HeapRegionNode> common = hasPotentialSharing(d,
327 if (!common.isEmpty()) {
328 foundSomeSharing = true;
329 bw.write("Potential sharing between "
330 + as1.getDisjointAnalysisId() + " and "
331 + as2.getDisjointAnalysisId() + ".\n");
332 bw.write(prettyPrintNodeSet(common) + "\n");
338 outerChecked.add(as1);
341 if (!foundSomeSharing) {
342 bw.write("No sharing classes between flagged objects found.\n");
344 bw.write("\nNumber sharing classes: "+numSharing);
347 bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
// alloc-site abstraction for the fabricated command-line-args array
// allocation in the analysis entry method
354 public Alloc getCmdLineArgsAlloc() {
355 return getAllocationSiteFromFlatNew( constructedCmdLineArgsNew );
// alloc-site abstraction for a single fabricated command-line-arg String
357 public Alloc getCmdLineArgAlloc() {
358 return getAllocationSiteFromFlatNew( constructedCmdLineArgNew );
// alloc-site abstraction for the byte array backing a fabricated
// command-line-arg String
360 public Alloc getCmdLineArgBytesAlloc() {
361 return getAllocationSiteFromFlatNew( constructedCmdLineArgBytesNew );
// alloc-site abstraction modeling runtime allocation of string literals
// (built by initImplicitStringsModel)
363 public Alloc getNewStringLiteralAlloc() {
364 return newStringLiteralAlloc;
// alloc-site abstraction modeling the byte array backing a string literal
// (built by initImplicitStringsModel)
366 public Alloc getNewStringLiteralBytesAlloc() {
367 return newStringLiteralBytesAlloc;
370 ///////////////////////////////////////////
372 // end public interface
374 ///////////////////////////////////////////
// guard used by every public-interface method: fail loudly if a client
// queries results before the interprocedural fixed point is reached
378 protected void checkAnalysisComplete() {
379 if( !analysisComplete ) {
380 throw new Error("Warning: public interface method called while analysis is running.");
389 // run in faster mode, only when bugs are wrung out!
390 public static boolean releaseMode;
392 // use command line option to set this, analysis
393 // should attempt to be deterministic
394 public static boolean determinismDesired;
396 // when we want to enforce determinism in the
397 // analysis we need to sort descriptors rather
398 // than toss them in efficient sets, use this
399 public static DescriptorComparator dComp =
400 new DescriptorComparator();
403 // data from the compiler
405 public CallGraph callGraph;
406 public Liveness liveness;
407 public ArrayReferencees arrayReferencees;
408 public RBlockRelationAnalysis rblockRel;
409 public TypeUtil typeUtil;
410 public int allocationDepth;
412 protected boolean doEffectsAnalysis = false;
413 protected EffectsAnalysis effectsAnalysis;
414 protected BuildStateMachines buildStateMachines;
416 protected boolean doDefiniteReachAnalysis = false;
417 protected DefiniteReachAnalysis definiteReachAnalysis;
420 // data structure for public interface
421 private Hashtable< Descriptor, HashSet<AllocSite> >
422 mapDescriptorToAllocSiteSet;
425 // for public interface methods to warn that they
426 // are grabbing results during analysis
427 private boolean analysisComplete;
430 // used to identify HeapRegionNode objects
431 // A unique ID equates an object in one
432 // ownership graph with an object in another
433 // graph that logically represents the same
435 // start at 10 and increment to reserve some
436 // IDs for special purposes
437 static protected int uniqueIDcount = 10;
440 // An out-of-scope method created by the
441 // analysis that has no parameters, and
442 // appears to allocate the command line
443 // arguments, then invoke the source code's
444 // main method. The purpose of this is to
445 // provide the analysis with an explicit
446 // top-level context with no parameters
447 protected MethodDescriptor mdAnalysisEntry;
448 protected FlatMethod fmAnalysisEntry;
450 // main method defined by source program
451 protected MethodDescriptor mdSourceEntry;
453 // the set of task and/or method descriptors
454 // reachable in call graph
455 protected Set<Descriptor>
456 descriptorsToAnalyze;
458 // current descriptors to visit in fixed-point
459 // interprocedural analysis, prioritized by
460 // dependency in the call graph
461 protected Stack<Descriptor>
462 descriptorsToVisitStack;
463 protected PriorityQueue<DescriptorQWrapper>
466 // a duplication of the above structure, but
467 // for efficient testing of inclusion
468 protected HashSet<Descriptor>
469 descriptorsToVisitSet;
471 // storage for priorities; it doesn't make sense
472 // to add them to the Descriptor class, just in
474 protected Hashtable<Descriptor, Integer>
475 mapDescriptorToPriority;
477 // when analyzing a method and scheduling more:
478 // remember set of callee's enqueued for analysis
479 // so they can be put on top of the callers in
480 // the stack-visit mode
481 protected Set<Descriptor>
484 // maps a descriptor to its current partial result
485 // from the intraprocedural fixed-point analysis--
486 // when the interprocedural analysis settles, this
487 // mapping will have the final results for each
489 protected Hashtable<Descriptor, ReachGraph>
490 mapDescriptorToCompleteReachGraph;
492 // maps a descriptor to its known dependents: namely
493 // methods or tasks that call the descriptor's method
494 // AND are part of this analysis (reachable from main)
495 protected Hashtable< Descriptor, Set<Descriptor> >
496 mapDescriptorToSetDependents;
498 // if the analysis client wants to flag allocation sites
499 // programmatically, it should provide a set of FlatNew
500 // statements--this may be null if unneeded
501 protected Set<FlatNew> sitesToFlag;
503 // maps each flat new to one analysis abstraction
504 // allocate site object, these exist outside reach graphs
505 protected Hashtable<FlatNew, AllocSite>
506 mapFlatNewToAllocSite;
508 // maps intergraph heap region IDs to intergraph
509 // allocation sites that created them, a redundant
510 // structure for efficiency in some operations
511 protected Hashtable<Integer, AllocSite>
514 // maps a method to its initial heap model (IHM) that
515 // is the set of reachability graphs from every caller
516 // site, all merged together. The reason that we keep
517 // them separate is that any one call site's contribution
518 // to the IHM may change along the path to the fixed point
519 protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
520 mapDescriptorToIHMcontributions;
522 // additionally, keep a mapping from descriptors to the
523 // merged in-coming initial context, because we want this
524 // initial context to be STRICTLY MONOTONIC
525 protected Hashtable<Descriptor, ReachGraph>
526 mapDescriptorToInitialContext;
528 // mapping of current partial results for a given node. Note that
529 // to reanalyze a method we discard all partial results because a
530 // null reach graph indicates the node needs to be visited on the
531 // way to the fixed point.
532 // The reason for a persistent mapping is so after the analysis we
533 // can ask for the graph of any node at the fixed point, but this
534 // option is only enabled with a compiler flag.
535 protected Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraphPersist;
536 protected Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph;
539 // make the result for back edges analysis-wide STRICTLY
540 // MONOTONIC as well, but notice we use FlatNode as the
541 // key for this map: in case we want to consider other
542 // nodes as back edge's in future implementations
543 protected Hashtable<FlatNode, ReachGraph>
544 mapBackEdgeToMonotone;
547 public static final String arrayElementFieldName = "___element_";
548 static protected Hashtable<TypeDescriptor, FieldDescriptor>
552 protected boolean suppressOutput;
554 // for controlling DOT file output
555 protected boolean writeFinalDOTs;
556 protected boolean writeAllIncrementalDOTs;
558 // supporting DOT output--when we want to write every
559 // partial method result, keep a tally for generating
561 protected Hashtable<Descriptor, Integer>
562 mapDescriptorToNumUpdates;
564 //map task descriptor to initial task parameter
565 protected Hashtable<Descriptor, ReachGraph>
566 mapDescriptorToReachGraph;
568 protected PointerMethod pm;
570 //Keeps track of all the reach graphs at every program point
571 //DO NOT USE UNLESS YOU REALLY NEED IT
572 static protected Hashtable<FlatNode, ReachGraph> fn2rgAtEnter =
573 new Hashtable<FlatNode, ReachGraph>();
575 static protected Hashtable<FlatNode, ReachGraph> fn2rgAtExit =
576 new Hashtable<FlatNode, ReachGraph>();
579 private Hashtable<FlatCall, Descriptor> fc2enclosing;
581 Accessible accessible;
584 // we construct an entry method of flat nodes complete
585 // with a new allocation site to model the command line
586 // args creation just for the analysis, so remember that
587 // allocation site. Later in code gen we might want to
588 // know if something is pointing to the cmd line args
589 // and we can verify by checking the allocation site field.
590 protected FlatNew constructedCmdLineArgsNew;
591 protected FlatNew constructedCmdLineArgNew;
592 protected FlatNew constructedCmdLineArgBytesNew;
594 // similar to above, the runtime allocates new strings
595 // for literal nodes, so make up an alloc to model that
596 protected AllocSite newStringLiteralAlloc;
597 protected AllocSite newStringLiteralBytesAlloc;
599 // both of the above need the descriptor of the field
600 // for the String's value field to reference by the
601 // byte array from the string object
602 protected TypeDescriptor stringType;
603 protected TypeDescriptor stringBytesType;
604 protected FieldDescriptor stringBytesField;
// Build the implicit model for strings the runtime allocates on its own
// (string literals and their backing char arrays): locate String's value
// field, then fabricate FlatNew nodes and alloc-site abstractions for the
// literal String object and its byte array.
607 protected void initImplicitStringsModel() {
609 ClassDescriptor cdString = typeUtil.getClass( typeUtil.StringClass );
610 assert cdString != null;
// NOTE(review): the assignment targets for the next two expressions
// (stringType / stringBytesType) are on lines elided from this listing
614 new TypeDescriptor( cdString );
617 new TypeDescriptor(TypeDescriptor.CHAR).makeArray( state );
// scan String's fields for the runtime value field (the char array)
620 stringBytesField = null;
621 Iterator sFieldsItr = cdString.getFields();
622 while( sFieldsItr.hasNext() ) {
623 FieldDescriptor fd = (FieldDescriptor) sFieldsItr.next();
624 if( fd.getSymbol().equals( typeUtil.StringClassValueField ) ) {
625 stringBytesField = fd;
629 assert stringBytesField != null;
// fabricate an allocation for the literal String object itself
632 TempDescriptor throwAway1 =
633 new TempDescriptor("stringLiteralTemp_dummy1",
636 FlatNew fnStringLiteral =
637 new FlatNew(stringType,
641 newStringLiteralAlloc
642 = getAllocSiteFromFlatNewPRIVATE( fnStringLiteral );
// and another for the backing byte/char array
645 TempDescriptor throwAway2 =
646 new TempDescriptor("stringLiteralTemp_dummy2",
649 FlatNew fnStringLiteralBytes =
650 new FlatNew(stringBytesType,
654 newStringLiteralBytesAlloc
655 = getAllocSiteFromFlatNewPRIVATE( fnStringLiteralBytes );
661 // allocate various structures that are not local
662 // to a single class method--should be done once
663 protected void allocateStructures() {
// deterministic mode uses an ordered set so iteration order is stable
665 if( determinismDesired ) {
666 // use an ordered set
667 descriptorsToAnalyze = new TreeSet<Descriptor>(dComp);
669 // otherwise use a speedy hashset
670 descriptorsToAnalyze = new HashSet<Descriptor>();
673 mapDescriptorToCompleteReachGraph =
674 new Hashtable<Descriptor, ReachGraph>();
676 mapDescriptorToNumUpdates =
677 new Hashtable<Descriptor, Integer>();
679 mapDescriptorToSetDependents =
680 new Hashtable< Descriptor, Set<Descriptor> >();
682 mapFlatNewToAllocSite =
683 new Hashtable<FlatNew, AllocSite>();
685 mapDescriptorToIHMcontributions =
686 new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();
688 mapDescriptorToInitialContext =
689 new Hashtable<Descriptor, ReachGraph>();
691 mapFlatNodeToReachGraphPersist =
692 new Hashtable<FlatNode, ReachGraph>();
694 mapBackEdgeToMonotone =
695 new Hashtable<FlatNode, ReachGraph>();
697 mapHrnIdToAllocSite =
698 new Hashtable<Integer, AllocSite>();
700 mapTypeToArrayField =
701 new Hashtable <TypeDescriptor, FieldDescriptor>();
// only allocate the work-list structure the chosen scheduling mode needs
703 if( state.DISJOINTDVISITSTACK ||
704 state.DISJOINTDVISITSTACKEESONTOP
706 descriptorsToVisitStack =
707 new Stack<Descriptor>();
710 if( state.DISJOINTDVISITPQUE ) {
711 descriptorsToVisitQ =
712 new PriorityQueue<DescriptorQWrapper>();
715 descriptorsToVisitSet =
716 new HashSet<Descriptor>();
718 mapDescriptorToPriority =
719 new Hashtable<Descriptor, Integer>();
// NOTE(review): the assignment target for the next line (presumably
// calleesToEnqueue) is on a line elided from this listing
722 new HashSet<Descriptor>();
724 mapDescriptorToAllocSiteSet =
725 new Hashtable<Descriptor, HashSet<AllocSite> >();
727 mapDescriptorToReachGraph =
728 new Hashtable<Descriptor, ReachGraph>();
730 fc2enclosing = new Hashtable<FlatCall, Descriptor>();
735 // this analysis generates a disjoint reachability
736 // graph for every reachable method in the program
// convenience constructor: no BuildStateMachines client, output enabled
737 public DisjointAnalysis(State s,
// NOTE(review): intervening parameters (tu, cg, l, ar -- see the init
// forward below) are on lines elided from this listing
742 Set<FlatNew> sitesToFlag,
743 RBlockRelationAnalysis rra
745 init(s, tu, cg, l, ar, sitesToFlag, rra, null, false);
// convenience constructor: no BuildStateMachines client, but lets the
// caller suppress console/file output
748 public DisjointAnalysis(State s,
// NOTE(review): intervening parameters (tu, cg, l, ar) are elided here
753 Set<FlatNew> sitesToFlag,
754 RBlockRelationAnalysis rra,
755 boolean suppressOutput
757 init(s, tu, cg, l, ar, sitesToFlag, rra, null, suppressOutput);
// full constructor: also supplies a BuildStateMachines client used by
// the effects analysis
760 public DisjointAnalysis(State s,
// NOTE(review): intervening parameters (tu, cg, l, ar) are elided here
765 Set<FlatNew> sitesToFlag,
766 RBlockRelationAnalysis rra,
767 BuildStateMachines bsm,
768 boolean suppressOutput
770 init(s, tu, cg, l, ar, sitesToFlag, rra, bsm, suppressOutput);
// Shared constructor body: wires in compiler inputs, configures static
// ReachGraph state from command-line flags, allocates structures, runs
// the interprocedural fixed point, then emits any requested reports.
773 protected void init(State state,
// NOTE(review): intervening parameters (typeUtil, callGraph, liveness)
// are on lines elided from this listing
777 ArrayReferencees arrayReferencees,
778 Set<FlatNew> sitesToFlag,
779 RBlockRelationAnalysis rra,
780 BuildStateMachines bsm,
781 boolean suppressOutput
784 analysisComplete = false;
787 this.typeUtil = typeUtil;
788 this.callGraph = callGraph;
789 this.liveness = liveness;
790 this.arrayReferencees = arrayReferencees;
791 this.sitesToFlag = sitesToFlag;
792 this.rblockRel = rra;
793 this.suppressOutput = suppressOutput;
794 this.buildStateMachines = bsm;
// presence of an OoOJava rblock relation enables the effects analysis
796 if( rblockRel != null ) {
797 doEffectsAnalysis = true;
798 effectsAnalysis = new EffectsAnalysis();
800 EffectsAnalysis.state = state;
801 EffectsAnalysis.buildStateMachines = buildStateMachines;
803 //note: instead of reachgraph's isAccessible, using the result of accessible analysis
804 //since accessible gives us more accurate results
805 accessible=new Accessible(state, callGraph, rra, liveness);
806 accessible.doAnalysis();
809 this.allocationDepth = state.DISJOINTALLOCDEPTH;
810 this.releaseMode = state.DISJOINTRELEASEMODE;
811 this.determinismDesired = state.DISJOINTDETERMINISM;
813 this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL;
814 this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL;
// debug-snapshot configuration (per-method graph captures)
816 this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
817 this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
818 this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
819 this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
820 this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
821 this.snapVisitCounter = 1; // count visits from 1 (user will write 1, means 1st visit)
822 this.snapNodeCounter = 0; // count nodes from 0
// exactly one method-visiting mode must be selected
825 state.DISJOINTDVISITSTACK ||
826 state.DISJOINTDVISITPQUE ||
827 state.DISJOINTDVISITSTACKEESONTOP;
828 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
829 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
830 assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);
832 // set some static configuration for ReachGraphs
833 ReachGraph.allocationDepth = allocationDepth;
834 ReachGraph.typeUtil = typeUtil;
835 ReachGraph.state = state;
837 ReachGraph.initOutOfScopeTemps();
839 ReachGraph.debugCallSiteVisitStartCapture
840 = state.DISJOINTDEBUGCALLVISITTOSTART;
842 ReachGraph.debugCallSiteNumVisitsToCapture
843 = state.DISJOINTDEBUGCALLNUMVISITS;
845 ReachGraph.debugCallSiteStopAfter
846 = state.DISJOINTDEBUGCALLSTOPAFTER;
848 ReachGraph.debugCallSiteVisitCounter
849 = 0; // count visits from 1, is incremented before first visit
851 pm = new PointerMethod();
853 if( state.DO_DEFINITE_REACH_ANALYSIS ) {
854 doDefiniteReachAnalysis = true;
855 definiteReachAnalysis = new DefiniteReachAnalysis( pm );
859 if( suppressOutput ) {
860 System.out.println("* Running disjoint reachability analysis with output suppressed! *");
864 allocateStructures();
866 initImplicitStringsModel();
870 double timeStartAnalysis = (double) System.nanoTime();
872 // start interprocedural fixed-point computation
875 } catch( IOException e ) {
876 throw new Error("IO Exception while writing disjointness analysis output.");
// from here on, public-interface queries are allowed
879 analysisComplete=true;
881 double timeEndAnalysis = (double) System.nanoTime();
882 double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow(10.0, 9.0) );
885 if( sitesToFlag != null ) {
886 treport = String.format("Disjoint reachability analysis flagged %d sites and took %.3f sec.", sitesToFlag.size(), dt);
887 if(sitesToFlag.size()>0) {
888 treport+="\nFlagged sites:"+"\n"+sitesToFlag.toString();
891 treport = String.format("Disjoint reachability analysis took %.3f sec.", dt);
893 if( state.DISJOINT_COUNT_VISITS ) {
894 treport += "\nFixed point algorithm visited "+totalMethodVisits+
895 " methods and "+totalNodeVisits+" nodes.";
897 String justtime = String.format("%.2f", dt);
898 System.out.println(treport);
// optional post-analysis outputs, each gated by a command-line flag
902 if( writeFinalDOTs && !writeAllIncrementalDOTs ) {
906 if( state.DISJOINTWRITEIHMS ) {
910 if( state.DISJOINTWRITEINITCONTEXTS ) {
911 writeInitialContexts();
914 if( state.DISJOINT_WRITE_ALL_NODE_FINAL_GRAPHS ) {
915 writeFinalGraphsForEveryNode();
918 if( state.DISJOINTALIASFILE != null && !suppressOutput ) {
920 writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
922 writeAllSharingJava(state.DISJOINTALIASFILE,
925 state.DISJOINTALIASTAB,
932 buildStateMachines.writeStateMachines();
935 } catch( IOException e ) {
936 throw new Error("IO Exception while writing disjointness analysis output.");
// true if the configured scheduling structure (stack or priority queue)
// still has task/method descriptors pending analysis
941 protected boolean moreDescriptorsToVisit() {
942 if( state.DISJOINTDVISITSTACK ||
943 state.DISJOINTDVISITSTACKEESONTOP
945 return !descriptorsToVisitStack.isEmpty();
947 } else if( state.DISJOINTDVISITPQUE ) {
948 return !descriptorsToVisitQ.isEmpty();
951 throw new Error("Neither descriptor visiting mode set");
955 // fixed-point computation over the call graph--when a
956 // method's callees are updated, it must be reanalyzed
957 protected void analyzeMethods() throws java.io.IOException {
959 // task or non-task (java) mode determines what the roots
960 // of the call chain are, and establishes the set of methods
961 // reachable from the roots that will be analyzed
// NOTE(review): the branch condition selecting task vs. java mode is on
// a line elided from this listing
964 if( !suppressOutput ) {
965 System.out.println("Bamboo mode...");
// task mode: every task is a root
968 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
969 while( taskItr.hasNext() ) {
970 TaskDescriptor td = (TaskDescriptor) taskItr.next();
971 if( !descriptorsToAnalyze.contains(td) ) {
972 // add all methods transitively reachable from the
974 descriptorsToAnalyze.add(td);
975 descriptorsToAnalyze.addAll(callGraph.getAllMethods(td) );
980 if( !suppressOutput ) {
981 System.out.println("Java mode...");
984 // add all methods transitively reachable from the
985 // source's main to set for analysis
986 mdSourceEntry = typeUtil.getMain();
987 descriptorsToAnalyze.add(mdSourceEntry);
988 descriptorsToAnalyze.addAll(callGraph.getAllMethods(mdSourceEntry) );
990 // fabricate an empty calling context that will call
991 // the source's main, but call graph doesn't know
992 // about it, so explicitly add it
993 makeAnalysisEntryMethod(mdSourceEntry);
994 descriptorsToAnalyze.add(mdAnalysisEntry);
999 // now, depending on the interprocedural mode for visiting
1000 // methods, set up the needed data structures
1002 if( state.DISJOINTDVISITPQUE ) {
1004 // topologically sort according to the call graph so
1005 // leaf calls are last, helps build contexts up first
1006 LinkedList<Descriptor> sortedDescriptors =
1007 topologicalSort(descriptorsToAnalyze);
1009 // add sorted descriptors to priority queue, and duplicate
1010 // the queue as a set for efficiently testing whether some
1011 // method is marked for analysis
1013 Iterator<Descriptor> dItr;
1015 // for the priority queue, give items at the head
1016 // of the sorted list a low number (highest priority)
1017 while( !sortedDescriptors.isEmpty() ) {
1018 Descriptor d = sortedDescriptors.removeFirst();
1019 mapDescriptorToPriority.put(d, new Integer(p) );
1020 descriptorsToVisitQ.add(new DescriptorQWrapper(p, d) );
1021 descriptorsToVisitSet.add(d);
1025 } else if( state.DISJOINTDVISITSTACK ||
1026 state.DISJOINTDVISITSTACKEESONTOP
1028 // if we're doing the stack scheme, just throw the root
1029 // method or tasks on the stack
1031 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
1032 while( taskItr.hasNext() ) {
1033 TaskDescriptor td = (TaskDescriptor) taskItr.next();
1034 descriptorsToVisitStack.add(td);
1035 descriptorsToVisitSet.add(td);
// java mode: only the fabricated entry method seeds the stack
1039 descriptorsToVisitStack.add(mdAnalysisEntry);
1040 descriptorsToVisitSet.add(mdAnalysisEntry);
1044 throw new Error("Unknown method scheduling mode");
1048 // analyze scheduled methods until there are no more to visit
1049 while( moreDescriptorsToVisit() ) {
1050 Descriptor d = null;
1052 if( state.DISJOINTDVISITSTACK ||
1053 state.DISJOINTDVISITSTACKEESONTOP
1055 d = descriptorsToVisitStack.pop();
1057 } else if( state.DISJOINTDVISITPQUE ) {
1058 d = descriptorsToVisitQ.poll().getDescriptor();
// keep the shadow membership set in sync with the work list
1061 assert descriptorsToVisitSet.contains(d);
1062 descriptorsToVisitSet.remove(d);
1064 // because the task or method descriptor just extracted
1065 // was in the "to visit" set it either hasn't been analyzed
1066 // yet, or some method that it depends on has been
1067 // updated. Recompute a complete reachability graph for
1068 // this task/method and compare it to any previous result.
1069 // If there is a change detected, add any methods/tasks
1070 // that depend on this one to the "to visit" set.
1072 if( !suppressOutput ) {
1073 System.out.println("Analyzing " + d);
1076 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1077 assert calleesToEnqueue.isEmpty();
1080 ReachGraph rg = analyzeMethod(d);
1081 ReachGraph rgPrev = getPartial(d);
1083 if( !rg.equals(rgPrev) ) {
1086 if( state.DISJOINTDEBUGSCHEDULING ) {
1087 System.out.println(" complete graph changed, scheduling callers for analysis:");
1090 // results for d changed, so enqueue dependents
1091 // of d for further analysis
1092 Iterator<Descriptor> depsItr = getDependents(d).iterator();
1093 while( depsItr.hasNext() ) {
1094 Descriptor dNext = depsItr.next();
1097 if( state.DISJOINTDEBUGSCHEDULING ) {
1098 System.out.println(" "+dNext);
1103 // whether or not the method under analysis changed,
1104 // we may have some callees that are scheduled for
1105 // more analysis, and they should go on the top of
1106 // the stack now (in other method-visiting modes they
1107 // are already enqueued at this point
1108 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1109 Iterator<Descriptor> depsItr = calleesToEnqueue.iterator();
1110 while( depsItr.hasNext() ) {
1111 Descriptor dNext = depsItr.next();
1114 calleesToEnqueue.clear();
// Intraprocedural fixed-point analysis for one method or task:
// seeds a work set with the method's FlatMethod node, repeatedly
// applies analyzeFlatNode as the per-node transfer function until
// each node's reach graph stops changing, then merges the graphs
// at all return nodes into the method's conservative summary.
// NOTE(review): the embedded original line numbers in this listing
// jump, so some statements (else-branches, closing braces) are not
// visible here; comments below only describe what is visible.
1120 protected ReachGraph analyzeMethod(Descriptor d)
1121 throws java.io.IOException {
// optional statistics gathering
1123 if( state.DISJOINT_COUNT_VISITS ) {
1124 ++totalMethodVisits;
1127 // get the flat code for this descriptor
// the fabricated analysis entry has no flat code in 'state'
1129 if( d == mdAnalysisEntry ) {
1130 fm = fmAnalysisEntry;
1132 fm = state.getMethodFlat(d);
1134 pm.analyzeMethod(fm);
1136 // intraprocedural work set
1137 Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
1138 flatNodesToVisit.add(fm);
1140 // if determinism is desired by client, shadow the
1141 // set with a queue to make visit order deterministic
1142 Queue<FlatNode> flatNodesToVisitQ = null;
1143 if( determinismDesired ) {
1144 flatNodesToVisitQ = new LinkedList<FlatNode>();
1145 flatNodesToVisitQ.add(fm);
1148 // start a new mapping of partial results
1149 mapFlatNodeToReachGraph =
1150 new Hashtable<FlatNode, ReachGraph>();
1152 // the set of return nodes partial results that will be combined as
1153 // the final, conservative approximation of the entire method
1154 HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();
// decide whether this is the method the user asked to snapshot for debugging
1158 boolean snapThisMethod = false;
1159 if( takeDebugSnapshots && d instanceof MethodDescriptor ) {
1160 MethodDescriptor mdThisMethod = (MethodDescriptor)d;
1161 ClassDescriptor cdThisMethod = mdThisMethod.getClassDesc();
1162 if( cdThisMethod != null ) {
1164 descSymbolDebug.equals( cdThisMethod.getSymbol()+
1166 mdThisMethod.getSymbol()
// fixed-point loop: drain the work set until no node's graph changes
1173 while( !flatNodesToVisit.isEmpty() ) {
1176 if( determinismDesired ) {
1177 assert !flatNodesToVisitQ.isEmpty();
1178 fn = flatNodesToVisitQ.remove();
1180 fn = flatNodesToVisit.iterator().next();
1182 flatNodesToVisit.remove(fn);
1184 // effect transfer function defined by this node,
1185 // then compare it to the old graph at this node
1186 // to see if anything was updated.
1188 ReachGraph rg = new ReachGraph();
1189 TaskDescriptor taskDesc;
// a task's entry node starts from a persisted per-task graph rather
// than an empty one
1190 if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null) {
1191 if(mapDescriptorToReachGraph.containsKey(taskDesc)) {
1192 // retrieve existing reach graph if it is not first time
1193 rg=mapDescriptorToReachGraph.get(taskDesc);
1195 // create initial reach graph for a task
1196 rg=createInitialTaskReachGraph((FlatMethod)fn);
1198 mapDescriptorToReachGraph.put(taskDesc, rg);
1202 // start by merging all node's parents' graphs
1203 for( int i = 0; i < pm.numPrev(fn); ++i ) {
1204 FlatNode pn = pm.getPrev(fn,i);
1205 if( mapFlatNodeToReachGraph.containsKey(pn) ) {
1206 ReachGraph rgParent = mapFlatNodeToReachGraph.get(pn);
1212 if( snapThisMethod ) {
1213 debugSnapshot(rg, fn, true);
1217 // modify rg with appropriate transfer function
1218 rg = analyzeFlatNode(d, fm, fn, setReturns, rg);
1221 if( snapThisMethod ) {
1222 debugSnapshot(rg, fn, false);
1227 // if the results of the new graph are different from
1228 // the current graph at this node, replace the graph
1229 // with the update and enqueue the children
1230 ReachGraph rgPrev = mapFlatNodeToReachGraph.get(fn);
1231 if( !rg.equals(rgPrev) ) {
1232 mapFlatNodeToReachGraph.put(fn, rg);
1234 // we don't necessarily want to keep the reach graph for every
1235 // node in the program unless a client or the user wants it
1236 if( state.DISJOINT_WRITE_ALL_NODE_FINAL_GRAPHS ) {
1237 mapFlatNodeToReachGraphPersist.put(fn, rg);
// graph changed: successors must be revisited
1240 for( int i = 0; i < pm.numNext(fn); i++ ) {
1241 FlatNode nn = pm.getNext(fn, i);
1243 flatNodesToVisit.add(nn);
1244 if( determinismDesired ) {
1245 flatNodesToVisitQ.add(nn);
1252 // end by merging all return nodes into a complete
1253 // reach graph that represents all possible heap
1254 // states after the flat method returns
1255 ReachGraph completeGraph = new ReachGraph();
// every analyzed method is expected to have at least one return node;
// print the descriptor before the assert fires to aid debugging
1257 if( setReturns.isEmpty() ) {
1258 System.out.println( "d = "+d );
1261 assert !setReturns.isEmpty();
1262 Iterator retItr = setReturns.iterator();
1263 while( retItr.hasNext() ) {
1264 FlatReturnNode frn = (FlatReturnNode) retItr.next();
1266 assert mapFlatNodeToReachGraph.containsKey(frn);
1267 ReachGraph rgRet = mapFlatNodeToReachGraph.get(frn);
1269 completeGraph.merge(rgRet);
1273 if( snapThisMethod ) {
1274 // increment that we've visited the debug snap
1275 // method, and reset the node counter
1276 System.out.println(" @@@ debug snap at visit "+snapVisitCounter);
1278 snapNodeCounter = 0;
1280 if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&
1283 System.out.println("!!! Stopping analysis after debug snap captures. !!!");
1289 return completeGraph;
// Per-node transfer function of the analysis: dispatches on the flat
// node's kind and updates the reach graph 'rg' accordingly (copies,
// casts, field/element loads and stores, allocation, literals, SESE
// enter/exit, calls, returns).  Also feeds the optional effects
// analysis and definite-reachability analysis, and records the
// graph at node entry/exit in fn2rgAtEnter / fn2rgAtExit.
// Returns the (possibly replaced) reach graph after the node.
// NOTE(review): the embedded original line numbers jump, so some
// statements, arguments, and closing braces are not visible in this
// listing; comments only describe the visible code.
1293 protected ReachGraph
1294 analyzeFlatNode(Descriptor d,
1295 FlatMethod fmContaining,
1297 HashSet<FlatReturnNode> setRetNodes,
1299 ) throws java.io.IOException {
1302 if( state.DISJOINT_COUNT_VISITS ) {
1307 // any variables that are no longer live should be
1308 // nullified in the graph to reduce edges
1309 //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );
// scratch variables shared across the switch cases below
1313 FieldDescriptor fld;
1314 TypeDescriptor tdElement;
1315 FieldDescriptor fdElement;
1316 FlatSESEEnterNode sese;
1317 FlatSESEExitNode fsexn;
1319 boolean alreadyReachable;
1320 Set<EdgeKey> edgeKeysForLoad;
1321 Set<EdgeKey> edgeKeysRemoved;
1322 Set<EdgeKey> edgeKeysAdded;
1323 Set<DefiniteReachState.FdEntry> edgesToElideFromProp;
1325 //Stores the flatnode's reach graph at enter
1326 ReachGraph rgOnEnter = new ReachGraph();
1327 rgOnEnter.merge(rg);
1328 fn2rgAtEnter.put(fn, rgOnEnter);
// tracks whether a case already informed the definite-reach analysis;
// if not, otherStatement() is called after the switch
1332 boolean didDefReachTransfer = false;
1336 // use node type to decide what transfer function
1337 // to apply to the reachability graph
1338 switch( fn.kind() ) {
// user-requested graph dump at a program point
1340 case FKind.FlatGenReachNode: {
1341 FlatGenReachNode fgrn = (FlatGenReachNode) fn;
1343 System.out.println(" Generating reach graph for program point: "+fgrn.getGraphName() );
1346 rg.writeGraph("genReach"+fgrn.getGraphName(),
1347 true, // write labels (variables)
1348 true, // selectively hide intermediate temp vars
1349 true, // prune unreachable heap regions
1350 false, // hide reachability altogether
1351 true, // hide subset reachability states
1352 true, // hide predicates
1353 true); //false); // hide edge taints
// user-requested definite-reach state dump
1357 case FKind.FlatGenDefReachNode: {
1358 FlatGenDefReachNode fgdrn = (FlatGenDefReachNode) fn;
1359 if( doDefiniteReachAnalysis ) {
1360 definiteReachAnalysis.writeState( fn, fgdrn.getOutputName() );
1365 case FKind.FlatMethod: {
1366 // construct this method's initial heap model (IHM)
1367 // since we're working on the FlatMethod, we know
1368 // the incoming ReachGraph 'rg' is empty
1370 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1371 getIHMcontributions(d);
1373 Set entrySet = heapsFromCallers.entrySet();
1374 Iterator itr = entrySet.iterator();
1375 while( itr.hasNext() ) {
1376 Map.Entry me = (Map.Entry)itr.next();
1377 FlatCall fc = (FlatCall) me.getKey();
1378 ReachGraph rgContrib = (ReachGraph) me.getValue();
1380 // note that "fc.getMethod()" like (Object.toString)
1381 // might not be equal to "d" like (String.toString)
1382 // because the mapping gets set up when we resolve
1384 rg.merge(rgContrib);
1387 // additionally, we are enforcing STRICT MONOTONICITY for the
1388 // method's initial context, so grow the context by whatever
1389 // the previously computed context was, and put the most
1390 // up-to-date context back in the map
1391 ReachGraph rgPrevContext = mapDescriptorToInitialContext.get(d);
1392 rg.merge(rgPrevContext);
1393 mapDescriptorToInitialContext.put(d, rg);
1395 if( doDefiniteReachAnalysis ) {
1396 FlatMethod fm = (FlatMethod) fn;
1397 Set<TempDescriptor> params = new HashSet<TempDescriptor>();
1398 for( int i = 0; i < fm.numParameters(); ++i ) {
1399 params.add( fm.getParameter( i ) );
1401 definiteReachAnalysis.methodEntry( fn, params );
1402 didDefReachTransfer = true;
// plain copy: x = y (only ASSIGN ops touch the heap model)
1406 case FKind.FlatOpNode:
1407 FlatOpNode fon = (FlatOpNode) fn;
1408 if( fon.getOp().getOp() == Operation.ASSIGN ) {
1409 lhs = fon.getDest();
1410 rhs = fon.getLeft();
1412 // before transfer, do effects analysis support
1413 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1414 if(rblockRel.isPotentialStallSite(fn)) {
1415 // x gets status of y
1416 if(!accessible.isAccessible(fn, rhs)) {
1417 rg.makeInaccessible(lhs);
1423 rg.assignTempXEqualToTempY(lhs, rhs);
1425 if( doDefiniteReachAnalysis ) {
1426 definiteReachAnalysis.copy( fn, lhs, rhs );
1427 didDefReachTransfer = true;
// cast: x = (T) y — same shape as copy but records the target type
1432 case FKind.FlatCastNode:
1433 FlatCastNode fcn = (FlatCastNode) fn;
1437 TypeDescriptor td = fcn.getType();
1440 // before transfer, do effects analysis support
1441 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1442 if(rblockRel.isPotentialStallSite(fn)) {
1443 // x gets status of y
1444 if(!accessible.isAccessible(fn,rhs)) {
1445 rg.makeInaccessible(lhs);
1451 rg.assignTempXEqualToCastedTempY(lhs, rhs, td);
1453 if( doDefiniteReachAnalysis ) {
1454 definiteReachAnalysis.copy( fn, lhs, rhs );
1455 didDefReachTransfer = true;
// field load: x = y.f
1459 case FKind.FlatFieldNode:
1460 FlatFieldNode ffn = (FlatFieldNode) fn;
1464 fld = ffn.getField();
1466 // before graph transform, possible inject
1467 // a stall-site taint
1468 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1470 if(rblockRel.isPotentialStallSite(fn)) {
1471 // x=y.f, stall y if not accessible
1472 // contributes read effects on stall site of y
1473 if(!accessible.isAccessible(fn,rhs)) {
1474 rg.taintStallSite(fn, rhs);
1477 // after this, x and y are accessbile.
1478 rg.makeAccessible(lhs);
1479 rg.makeAccessible(rhs);
1483 edgeKeysForLoad = null;
1484 if( doDefiniteReachAnalysis ) {
1485 edgeKeysForLoad = new HashSet<EdgeKey>();
// only reference-bearing fields alter the heap model
1488 if( shouldAnalysisTrack(fld.getType() ) ) {
1490 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fld, fn, edgeKeysForLoad );
1492 if( doDefiniteReachAnalysis ) {
1493 definiteReachAnalysis.load( fn, lhs, rhs, fld, edgeKeysForLoad );
1494 didDefReachTransfer = true;
1498 // after transfer, use updated graph to
1499 // do effects analysis
1500 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1501 effectsAnalysis.analyzeFlatFieldNode(rg, rhs, fld, fn);
// field store: x.f = y
1505 case FKind.FlatSetFieldNode:
1506 FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;
1508 lhs = fsfn.getDst();
1509 fld = fsfn.getField();
1510 rhs = fsfn.getSrc();
1512 boolean strongUpdate = false;
1514 alreadyReachable = false;
1515 edgeKeysRemoved = null;
1516 edgeKeysAdded = null;
1517 edgesToElideFromProp = null;
1518 if( doDefiniteReachAnalysis ) {
1519 alreadyReachable = definiteReachAnalysis.isAlreadyReachable( rhs, lhs, fn );
1520 edgeKeysRemoved = new HashSet<EdgeKey>();
1521 edgeKeysAdded = new HashSet<EdgeKey>();
1522 edgesToElideFromProp = definiteReachAnalysis.edgesToElidePropagation( lhs, rhs, fn );
1525 // before transfer func, possibly inject
1526 // stall-site taints
1527 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1529 if(rblockRel.isPotentialStallSite(fn)) {
1530 // x.y=f , stall x and y if they are not accessible
1531 // also contribute write effects on stall site of x
1532 if(!accessible.isAccessible(fn,lhs)) {
1533 rg.taintStallSite(fn, lhs);
1536 if(!accessible.isAccessible(fn,rhs)) {
1537 rg.taintStallSite(fn, rhs);
1540 // accessible status update
1541 rg.makeAccessible(lhs);
1542 rg.makeAccessible(rhs);
1546 if( shouldAnalysisTrack(fld.getType() ) ) {
// transfer reports whether a strong (destructive) update occurred
1548 strongUpdate = rg.assignTempXFieldFEqualToTempY( lhs,
1555 edgesToElideFromProp );
1556 if( doDefiniteReachAnalysis ) {
1557 definiteReachAnalysis.store( fn,
1563 didDefReachTransfer = true;
1567 // use transformed graph to do effects analysis
1568 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1569 effectsAnalysis.analyzeFlatSetFieldNode(rg, lhs, fld, fn, strongUpdate);
// array element load: x = y[i] — modeled as a load of the synthetic
// per-element-type array field
1573 case FKind.FlatElementNode:
1574 FlatElementNode fen = (FlatElementNode) fn;
1579 assert rhs.getType() != null;
1580 assert rhs.getType().isArray();
1582 tdElement = rhs.getType().dereference();
1583 fdElement = getArrayField(tdElement);
1585 // before transfer func, possibly inject
1587 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1588 if(rblockRel.isPotentialStallSite(fn)) {
1589 // x=y.f, stall y if not accessible
1590 // contributes read effects on stall site of y
1591 // after this, x and y are accessbile.
1592 if(!accessible.isAccessible(fn,rhs)) {
1593 rg.taintStallSite(fn, rhs);
1596 rg.makeAccessible(lhs);
1597 rg.makeAccessible(rhs);
1601 edgeKeysForLoad = null;
1602 if( doDefiniteReachAnalysis ) {
1603 edgeKeysForLoad = new HashSet<EdgeKey>();
1606 if( shouldAnalysisTrack(lhs.getType() ) ) {
1608 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fdElement, fn, edgeKeysForLoad );
1610 if( doDefiniteReachAnalysis ) {
1611 definiteReachAnalysis.load( fn, lhs, rhs, fdElement, edgeKeysForLoad );
1612 didDefReachTransfer = true;
1616 // use transformed graph to do effects analysis
1617 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1618 effectsAnalysis.analyzeFlatFieldNode(rg, rhs, fdElement, fn);
// array element store: x[i] = y — symmetric with FlatSetFieldNode
1622 case FKind.FlatSetElementNode:
1623 FlatSetElementNode fsen = (FlatSetElementNode) fn;
1625 lhs = fsen.getDst();
1626 rhs = fsen.getSrc();
1628 assert lhs.getType() != null;
1629 assert lhs.getType().isArray();
1631 tdElement = lhs.getType().dereference();
1632 fdElement = getArrayField(tdElement);
1634 alreadyReachable = false;
1635 edgeKeysRemoved = null;
1636 edgeKeysAdded = null;
1637 edgesToElideFromProp = null;
1638 if( doDefiniteReachAnalysis ) {
1639 alreadyReachable = definiteReachAnalysis.isAlreadyReachable( rhs, lhs, fn );
1640 edgeKeysRemoved = new HashSet<EdgeKey>();
1641 edgeKeysAdded = new HashSet<EdgeKey>();
1642 edgesToElideFromProp = definiteReachAnalysis.edgesToElidePropagation( lhs, rhs, fn );
1645 // before transfer func, possibly inject
1646 // stall-site taints
1647 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1649 if(rblockRel.isPotentialStallSite(fn)) {
1650 // x.y=f , stall x and y if they are not accessible
1651 // also contribute write effects on stall site of x
1652 if(!accessible.isAccessible(fn,lhs)) {
1653 rg.taintStallSite(fn, lhs);
1656 if(!accessible.isAccessible(fn,rhs)) {
1657 rg.taintStallSite(fn, rhs);
1660 // accessible status update
1661 rg.makeAccessible(lhs);
1662 rg.makeAccessible(rhs);
1666 if( shouldAnalysisTrack(rhs.getType() ) ) {
1667 // transfer func, BUT
1668 // skip this node if it cannot create new reachability paths
1669 if( !arrayReferencees.doesNotCreateNewReaching(fsen) ) {
1670 rg.assignTempXFieldFEqualToTempY( lhs,
1677 edgesToElideFromProp );
1680 if( doDefiniteReachAnalysis ) {
1681 definiteReachAnalysis.store( fn,
1687 didDefReachTransfer = true;
1691 // use transformed graph to do effects analysis
1692 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1693 effectsAnalysis.analyzeFlatSetFieldNode(rg, lhs, fdElement, fn,
// allocation: x = new T(...) — bind lhs to the node's allocation site
1699 FlatNew fnn = (FlatNew) fn;
1701 if( shouldAnalysisTrack(lhs.getType() ) ) {
1702 AllocSite as = getAllocSiteFromFlatNewPRIVATE(fnn);
1704 // before transform, support effects analysis
1705 if (doEffectsAnalysis && fmContaining != fmAnalysisEntry) {
1706 if (rblockRel.isPotentialStallSite(fn)) {
1707 // after creating new object, lhs is accessible
1708 rg.makeAccessible(lhs);
1713 rg.assignTempEqualToNewAlloc(lhs, as);
1715 if( doDefiniteReachAnalysis ) {
1716 definiteReachAnalysis.newObject( fn, lhs );
1717 didDefReachTransfer = true;
1723 case FKind.FlatLiteralNode:
1724 // BIG NOTE: this transfer function is only here for
1725 // points-to information for String literals. That's it.
1726 // Effects and disjoint reachability and all of that don't
1727 // care about references to literals.
1728 FlatLiteralNode fln = (FlatLiteralNode) fn;
1730 if( fln.getType().equals( stringType ) ) {
1731 rg.assignTempEqualToStringLiteral( fln.getDst(),
1732 newStringLiteralAlloc,
1733 newStringLiteralBytesAlloc,
// SESE (task/rblock) entry: reset stall taints, taint in-set vars
1739 case FKind.FlatSESEEnterNode:
1740 sese = (FlatSESEEnterNode) fn;
1742 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1744 // always remove ALL stall site taints at enter
1745 rg.removeAllStallSiteTaints();
1747 // inject taints for in-set vars
1748 rg.taintInSetVars(sese);
1753 case FKind.FlatSESEExitNode:
1754 fsexn = (FlatSESEExitNode) fn;
1755 sese = fsexn.getFlatEnter();
1757 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1759 // @ sese exit make all live variables
1760 // inaccessible to later parent statements
1761 rg.makeInaccessible(liveness.getLiveInTemps(fmContaining, fn) );
1763 // always remove ALL stall site taints at exit
1764 rg.removeAllStallSiteTaints();
1766 // remove in-set var taints for the exiting rblock
1767 rg.removeInContextTaints(sese);
// call site: compute callee-view heaps, schedule callees whose IHM
// contribution grew, then merge per-callee result graphs into rg
1772 case FKind.FlatCall: {
1773 Descriptor mdCaller;
1774 if( fmContaining.getMethod() != null ) {
1775 mdCaller = fmContaining.getMethod();
1777 mdCaller = fmContaining.getTask();
1779 FlatCall fc = (FlatCall) fn;
1780 MethodDescriptor mdCallee = fc.getMethod();
1781 FlatMethod fmCallee = state.getMethodFlat(mdCallee);
1784 if( doDefiniteReachAnalysis ) {
1785 definiteReachAnalysis.methodCall( fn, fc.getReturnTemp() );
1786 didDefReachTransfer = true;
1790 // the transformation for a call site should update the
1791 // current heap abstraction with any effects from the callee,
1792 // or if the method is virtual, the effects from any possible
1793 // callees, so find the set of callees...
1794 Set<MethodDescriptor> setPossibleCallees;
1795 if( determinismDesired ) {
1796 // use an ordered set
1797 setPossibleCallees = new TreeSet<MethodDescriptor>(dComp);
1799 // otherwise use a speedy hashset
1800 setPossibleCallees = new HashSet<MethodDescriptor>();
1803 if( mdCallee.isStatic() ) {
1804 setPossibleCallees.add(mdCallee);
1806 TypeDescriptor typeDesc = fc.getThis().getType();
1807 setPossibleCallees.addAll(callGraph.getMethods(mdCallee,
1813 DebugCallSiteData dcsd = new DebugCallSiteData();
1815 ReachGraph rgMergeOfPossibleCallers = new ReachGraph();
1818 Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
1819 while( mdItr.hasNext() ) {
1820 MethodDescriptor mdPossible = mdItr.next();
1821 FlatMethod fmPossible = state.getMethodFlat(mdPossible);
1823 addDependent(mdPossible, // callee
1827 // decide for each possible resolution of the method whether we
1828 // want to debug this call site
1829 decideDebugCallSite( dcsd, mdCaller, mdPossible );
1833 // calculate the heap this call site can reach--note this is
1834 // not used for the current call site transform, we are
1835 // grabbing this heap model for future analysis of the callees,
1836 // so if different results emerge we will return to this site
1837 ReachGraph heapForThisCall_old =
1838 getIHMcontribution(mdPossible, fc);
1840 // the computation of the callee-reachable heap
1841 // is useful for making the callee starting point
1842 // and for applying the call site transfer function
1843 Set<Integer> callerNodeIDsCopiedToCallee =
1844 new HashSet<Integer>();
1847 ReachGraph heapForThisCall_cur =
1848 rg.makeCalleeView(fc,
1850 callerNodeIDsCopiedToCallee,
1855 // enforce that a call site contribution can only
1856 // monotonically increase
1857 heapForThisCall_cur.merge(heapForThisCall_old);
1859 if( !heapForThisCall_cur.equals(heapForThisCall_old) ) {
1860 // if heap at call site changed, update the contribution,
1861 // and reschedule the callee for analysis
1862 addIHMcontribution(mdPossible, fc, heapForThisCall_cur);
1864 // map a FlatCall to its enclosing method/task descriptor
1865 // so we can write that info out later
1866 fc2enclosing.put(fc, mdCaller);
1868 if( state.DISJOINTDEBUGSCHEDULING ) {
1869 System.out.println(" context changed at callsite: "+fc+", scheduling callee: "+mdPossible);
1872 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1873 calleesToEnqueue.add(mdPossible);
1875 enqueue(mdPossible);
1882 // don't alter the working graph (rg) until we compute a
1883 // result for every possible callee, merge them all together,
1884 // then set rg to that
1885 ReachGraph rgPossibleCaller = new ReachGraph();
1886 rgPossibleCaller.merge(rg);
1888 ReachGraph rgPossibleCallee = getPartial(mdPossible);
1890 if( rgPossibleCallee == null ) {
1891 // if this method has never been analyzed just schedule it
1892 // for analysis and skip over this call site for now
1893 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1894 calleesToEnqueue.add(mdPossible);
1896 enqueue(mdPossible);
1899 if( state.DISJOINTDEBUGSCHEDULING ) {
1900 System.out.println(" callee hasn't been analyzed, scheduling: "+mdPossible);
1906 // calculate the method call transform
1907 rgPossibleCaller.resolveMethodCall(fc,
1910 callerNodeIDsCopiedToCallee,
1915 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1916 if( !accessible.isAccessible(fn, ReachGraph.tdReturn) ) {
1917 rgPossibleCaller.makeInaccessible(fc.getReturnTemp() );
1923 rgMergeOfPossibleCallers.merge(rgPossibleCaller);
1928 statusDebugCallSite( dcsd );
1932 // now that we've taken care of building heap models for
1933 // callee analysis, finish this transformation
1934 rg = rgMergeOfPossibleCallers;
1937 // jjenista: what is this? It breaks compilation
1938 // of programs with no tasks/SESEs/rblocks...
1939 //XXXXXXXXXXXXXXXXXXXXXXXXX
1940 //need to consider more
1941 if( state.OOOJAVA ) {
1942 FlatNode nextFN=fmCallee.getNext(0);
1943 if( nextFN instanceof FlatSESEEnterNode ) {
1944 FlatSESEEnterNode calleeSESE=(FlatSESEEnterNode)nextFN;
1945 if(!calleeSESE.getIsLeafSESE()) {
1946 rg.makeInaccessible(liveness.getLiveInTemps(fmContaining, fn) );
// return: record the node so analyzeMethod can merge all exits
1954 case FKind.FlatReturnNode:
1955 FlatReturnNode frn = (FlatReturnNode) fn;
1956 rhs = frn.getReturnTemp();
1958 // before transfer, do effects analysis support
1959 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1960 if(!accessible.isAccessible(fn,rhs)) {
1961 rg.makeInaccessible(ReachGraph.tdReturn);
1965 if( rhs != null && shouldAnalysisTrack(rhs.getType() ) ) {
1966 rg.assignReturnEqualToTemp(rhs);
1969 setRetNodes.add(frn);
// fall-through for node kinds with no specific definite-reach transfer
1976 if( doDefiniteReachAnalysis && !didDefReachTransfer ) {
1977 definiteReachAnalysis.otherStatement( fn );
1982 // dead variables were removed before the above transfer function
1983 // was applied, so eliminate heap regions and edges that are no
1984 // longer part of the abstractly-live heap graph, and sweep up
1985 // and reachability effects that are altered by the reduction
1986 //rg.abstractGarbageCollect();
1990 // back edges are strictly monotonic
1991 if( pm.isBackEdge(fn) ) {
1992 ReachGraph rgPrevResult = mapBackEdgeToMonotone.get(fn);
1993 rg.merge(rgPrevResult);
1994 mapBackEdgeToMonotone.put(fn, rg);
// snapshot the graph at node exit for later inspection
1998 ReachGraph rgOnExit = new ReachGraph();
2000 fn2rgAtExit.put(fn, rgOnExit);
2004 // at this point rg should be the correct update
2005 // by an above transfer function, or untouched if
2006 // the flat node type doesn't affect the heap
2012 // this method should generate integers strictly greater than zero!
2013 // special "shadow" regions are made from a heap region by negating
// Returns a fresh heap-region-node ID from the static counter.
// NOTE(review): the line that advances uniqueIDcount is not visible in
// this listing (original line numbers skip 2016) — confirm against the
// full source before editing.
2015 static public Integer generateUniqueHeapRegionNodeID() {
2017 return new Integer(uniqueIDcount);
// Returns (lazily creating and caching) the synthetic FieldDescriptor
// used to model all elements of an array of the given element type.
// NOTE(review): the return statement is outside this listing's visible
// lines (original numbers jump past 2031).
2022 static public FieldDescriptor getArrayField(TypeDescriptor tdElement) {
2023 FieldDescriptor fdElement = mapTypeToArrayField.get(tdElement);
2024 if( fdElement == null ) {
// first request for this element type: fabricate a public field
2025 fdElement = new FieldDescriptor(new Modifiers(Modifiers.PUBLIC),
2027 arrayElementFieldName,
2030 mapTypeToArrayField.put(tdElement, fdElement);
// Writes the final (complete) reach graph of every analyzed method or
// task to a DOT file named COMPLETE<desc> / COMPLETEtask<desc>.
2037 private void writeFinalGraphs() {
2038 Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
2039 Iterator itr = entrySet.iterator();
2040 while( itr.hasNext() ) {
2041 Map.Entry me = (Map.Entry)itr.next();
2042 Descriptor d = (Descriptor) me.getKey();
2043 ReachGraph rg = (ReachGraph) me.getValue();
// task graphs get a distinct name prefix
2046 if( d instanceof TaskDescriptor ) {
2047 graphName = "COMPLETEtask"+d;
2049 graphName = "COMPLETE"+d;
2052 rg.writeGraph(graphName,
2053 true, // write labels (variables)
2054 true, // selectively hide intermediate temp vars
2055 true, // prune unreachable heap regions
2056 false, // hide reachability altogether
2057 true, // hide subset reachability states
2058 true, // hide predicates
2059 true); // hide edge taints
// Writes every per-call-site initial-heap-model (IHM) contribution:
// one graph per (callee descriptor, calling FlatCall) pair.
2063 private void writeFinalIHMs() {
2064 Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
2065 while( d2IHMsItr.hasNext() ) {
2066 Map.Entry me1 = (Map.Entry)d2IHMsItr.next();
2067 Descriptor d = (Descriptor) me1.getKey();
2068 Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>)me1.getValue();
// inner loop: each call site that contributed heap to this callee
2070 Iterator fc2rgItr = IHMs.entrySet().iterator();
2071 while( fc2rgItr.hasNext() ) {
2072 Map.Entry me2 = (Map.Entry)fc2rgItr.next();
2073 FlatCall fc = (FlatCall) me2.getKey();
2074 ReachGraph rg = (ReachGraph) me2.getValue();
// fc2enclosing maps the call back to its enclosing method/task for the name
2076 rg.writeGraph("IHMPARTFOR"+d+"FROM"+fc2enclosing.get(fc)+fc,
2077 true, // write labels (variables)
2078 true, // selectively hide intermediate temp vars
2079 true, // hide reachability altogether
2080 true, // prune unreachable heap regions
2081 true, // hide subset reachability states
2082 false, // hide predicates
2083 true); // hide edge taints
// Writes each method's merged initial context (the strictly-monotonic
// entry heap accumulated in mapDescriptorToInitialContext) as
// INITIAL<desc>.
2088 private void writeInitialContexts() {
2089 Set entrySet = mapDescriptorToInitialContext.entrySet();
2090 Iterator itr = entrySet.iterator();
2091 while( itr.hasNext() ) {
2092 Map.Entry me = (Map.Entry)itr.next();
2093 Descriptor d = (Descriptor) me.getKey();
2094 ReachGraph rg = (ReachGraph) me.getValue();
2096 rg.writeGraph("INITIAL"+d,
2097 true, // write labels (variables)
2098 true, // selectively hide intermediate temp vars
2099 true, // prune unreachable heap regions
2100 false, // hide all reachability
2101 true, // hide subset reachability states
2102 true, // hide predicates
2103 false); // hide edge taints
// Writes the persisted per-flat-node final reach graphs (populated
// only when DISJOINT_WRITE_ALL_NODE_FINAL_GRAPHS is set) as
// NODEFINAL<node>.
2107 private void writeFinalGraphsForEveryNode() {
2108 Set entrySet = mapFlatNodeToReachGraphPersist.entrySet();
2109 Iterator itr = entrySet.iterator();
2110 while( itr.hasNext() ) {
2111 Map.Entry me = (Map.Entry) itr.next();
2112 FlatNode fn = (FlatNode) me.getKey();
2113 ReachGraph rg = (ReachGraph) me.getValue();
2115 rg.writeGraph("NODEFINAL"+fn,
2116 true, // write labels (variables)
2117 false, // selectively hide intermediate temp vars
2118 true, // prune unreachable heap regions
2119 true, // hide all reachability
2120 true, // hide subset reachability states
2121 true, // hide predicates
2122 true); // hide edge taints
// Returns the current complete (partial, in the fixed-point sense)
// reach graph for a method/task, or null if it hasn't been analyzed.
2127 protected ReachGraph getPartial(Descriptor d) {
2128 return mapDescriptorToCompleteReachGraph.get(d);
// Records the latest analysis result for a method/task; optionally
// dumps each incremental result to a uniquely-numbered DOT file.
2131 protected void setPartial(Descriptor d, ReachGraph rg) {
2132 mapDescriptorToCompleteReachGraph.put(d, rg);
2134 // when the flag for writing out every partial
2135 // result is set, we should spit out the graph,
2136 // but in order to give it a unique name we need
2137 // to track how many partial results for this
2138 // descriptor we've already written out
2139 if( writeAllIncrementalDOTs ) {
2140 if( !mapDescriptorToNumUpdates.containsKey(d) ) {
2141 mapDescriptorToNumUpdates.put(d, new Integer(0) );
2143 Integer n = mapDescriptorToNumUpdates.get(d);
2146 if( d instanceof TaskDescriptor ) {
2147 graphName = d+"COMPLETEtask"+String.format("%05d", n);
2149 graphName = d+"COMPLETE"+String.format("%05d", n);
2152 rg.writeGraph(graphName,
2153 true, // write labels (variables)
2154 true, // selectively hide intermediate temp vars
2155 true, // prune unreachable heap regions
2156 false, // hide all reachability
2157 true, // hide subset reachability states
2158 false, // hide predicates
2159 false); // hide edge taints
// bump the per-descriptor write counter for the next unique name
2161 mapDescriptorToNumUpdates.put(d, n + 1);
2167 // return just the allocation site associated with one FlatNew node
// Lazily creates (and caches in mapFlatNewToAllocSite) the AllocSite
// for a FlatNew: 'allocationDepth' single-object region IDs plus one
// summary region ID, all registered in mapHrnIdToAllocSite.
2168 protected AllocSite getAllocSiteFromFlatNewPRIVATE(FlatNew fnew) {
// sites listed in sitesToFlag are flagged without a source annotation
2170 boolean flagProgrammatically = false;
2171 if( sitesToFlag != null && sitesToFlag.contains(fnew) ) {
2172 flagProgrammatically = true;
2175 if( !mapFlatNewToAllocSite.containsKey(fnew) ) {
2176 AllocSite as = AllocSite.factory(allocationDepth,
2178 fnew.getDisjointId(),
2179 flagProgrammatically
2182 // the newest nodes are single objects
2183 for( int i = 0; i < allocationDepth; ++i ) {
2184 Integer id = generateUniqueHeapRegionNodeID();
2185 as.setIthOldest(i, id);
2186 mapHrnIdToAllocSite.put(id, as);
2189 // the oldest node is a summary node
2190 as.setSummary(generateUniqueHeapRegionNodeID() );
2192 mapFlatNewToAllocSite.put(fnew, as);
2195 return mapFlatNewToAllocSite.get(fnew);
// Decides whether a type participates in the heap model: immutable
// (primitive) types are tracked only when they are arrays, since an
// array of primitives is itself heap memory.
// NOTE(review): the non-immutable return path is outside this
// listing's visible lines.
2199 public static boolean shouldAnalysisTrack(TypeDescriptor type) {
2200 // don't track primitive types, but an array
2201 // of primitives is heap memory
2202 if( type.isImmutable() ) {
2203 return type.isArray();
2206 // everything else is an object
// Number of descriptors (methods/tasks) in the analysis set.
2210 protected int numMethodsAnalyzed() {
2211 return descriptorsToAnalyze.size();
2217 // Take in source entry which is the program's compiled entry and
2218 // create a new analysis entry, a method that takes no parameters
2219 // and appears to allocate the command line arguments and call the
2220 // source entry with them. The purpose of this analysis entry is
2221 // to provide a top-level method context with no parameters left.
// NOTE(review): many constructor-argument lines are outside this
// listing's visible lines (embedded original numbers skip); the
// structure below is: build descriptor, fabricate flat nodes that
// model "String[] args = new String[]{ new String(...) }", call the
// real entry, return, then chain the nodes together.
2222 protected void makeAnalysisEntryMethod(MethodDescriptor mdSourceEntry) {
2224 Modifiers mods = new Modifiers();
2225 mods.addModifier(Modifiers.PUBLIC);
2226 mods.addModifier(Modifiers.STATIC);
2228 TypeDescriptor returnType = new TypeDescriptor(TypeDescriptor.VOID);
2230 this.mdAnalysisEntry =
2231 new MethodDescriptor(mods,
2233 "analysisEntryMethod"
// model allocation of the String[] command-line args array
2236 TypeDescriptor argsType = mdSourceEntry.getParamType(0);
2237 TempDescriptor cmdLineArgs =
2238 new TempDescriptor("analysisEntryTemp_args",
2242 new FlatNew(argsType,
2246 this.constructedCmdLineArgsNew = fnArgs;
// model allocation of one argument String element
2248 TypeDescriptor argType = argsType.dereference();
2249 TempDescriptor anArg =
2250 new TempDescriptor("analysisEntryTemp_arg",
2254 new FlatNew(argType,
2258 this.constructedCmdLineArgNew = fnArg;
// index literal used to store the element into the array
2260 TypeDescriptor typeIndex = new TypeDescriptor(TypeDescriptor.INT);
2261 TempDescriptor index =
2262 new TempDescriptor("analysisEntryTemp_index",
2265 FlatLiteralNode fli =
2266 new FlatLiteralNode(typeIndex,
2271 FlatSetElementNode fse =
2272 new FlatSetElementNode(cmdLineArgs,
// model the String's backing byte array
2277 TypeDescriptor typeSize = new TypeDescriptor(TypeDescriptor.INT);
2278 TempDescriptor sizeBytes =
2279 new TempDescriptor("analysisEntryTemp_size",
2282 FlatLiteralNode fls =
2283 new FlatLiteralNode(typeSize,
2288 TempDescriptor strBytes =
2289 new TempDescriptor("analysisEntryTemp_strBytes",
2293 new FlatNew(stringBytesType,
2298 this.constructedCmdLineArgBytesNew = fnBytes;
2300 FlatSetFieldNode fsf =
2301 new FlatSetFieldNode(anArg,
2306 // throw this in so you can always see what the initial heap context
2307 // looks like if you want to, its cheap
2308 FlatGenReachNode fgen = new FlatGenReachNode( "argContext" );
// invoke the program's real entry with the fabricated args
2310 TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
2311 sourceEntryArgs[0] = cmdLineArgs;
2313 new FlatCall(mdSourceEntry,
2319 FlatReturnNode frn = new FlatReturnNode(null);
2321 FlatExit fe = new FlatExit();
2323 this.fmAnalysisEntry =
2324 new FlatMethod(mdAnalysisEntry,
// chain all fabricated nodes into a straight-line flat method body
2328 List<FlatNode> nodes = new LinkedList<FlatNode>();
2329 nodes.add( fnArgs );
2334 nodes.add( fnBytes );
2341 FlatNode current = this.fmAnalysisEntry;
2342 for( FlatNode next: nodes ) {
2343 current.addNext( next );
2348 // jjenista - this is useful for looking at the FlatIRGraph of the
2349 // analysis entry method constructed above if you have to modify it.
2350 // The usual method of writing FlatIRGraphs out doesn't work because
2351 // this flat method is private to the model of this analysis only.
2353 // FlatIRGraph flatMethodWriter =
2354 // new FlatIRGraph( state, false, false, false );
2355 // flatMethodWriter.writeFlatIRGraph( fmAnalysisEntry, "analysisEntry" );
2356 //} catch( IOException e ) {}
// Topologically sorts the descriptors via DFS over the call graph
// (dfsVisit), so callees are analyzed before callers; uses an ordered
// set when determinism is requested.
// NOTE(review): the 'return sorted;' is outside this listing's
// visible lines.
2360 protected LinkedList<Descriptor> topologicalSort(Set<Descriptor> toSort) {
2362 Set<Descriptor> discovered;
2364 if( determinismDesired ) {
2365 // use an ordered set
2366 discovered = new TreeSet<Descriptor>(dComp);
2368 // otherwise use a speedy hashset
2369 discovered = new HashSet<Descriptor>();
2372 LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();
2374 Iterator<Descriptor> itr = toSort.iterator();
2375 while( itr.hasNext() ) {
2376 Descriptor d = itr.next();
// start a DFS from each not-yet-discovered root
2378 if( !discovered.contains(d) ) {
2379 dfsVisit(d, toSort, sorted, discovered);
2386 // While we're doing DFS on call graph, remember
2387 // dependencies for efficient queuing of methods
2388 // during interprocedural analysis:
2390 // a dependent of a method descriptor d for this analysis is:
2391 // 1) a method or task that invokes d
2392 // 2) in the descriptorsToAnalyze set
// DFS helper for topologicalSort: recurses into the callers of d
// (restricted to the 'toSort' set) before d itself is finished, and
// records callee->caller dependent edges via addDependent on the way.
2393 protected void dfsVisit(Descriptor d,
2394 Set <Descriptor> toSort,
2395 LinkedList<Descriptor> sorted,
2396 Set <Descriptor> discovered) {
2399 // only methods have callers, tasks never do
2400 if( d instanceof MethodDescriptor ) {
2402 MethodDescriptor md = (MethodDescriptor) d;
2404 // the call graph is not aware that we have a fabricated
2405 // analysis entry that calls the program source's entry
2406 if( md == mdSourceEntry ) {
2407 if( !discovered.contains(mdAnalysisEntry) ) {
2408 addDependent(mdSourceEntry, // callee
2409 mdAnalysisEntry // caller
2411 dfsVisit(mdAnalysisEntry, toSort, sorted, discovered);
2415 // otherwise call graph guides DFS
2416 Iterator itr = callGraph.getCallerSet(md).iterator();
2417 while( itr.hasNext() ) {
2418 Descriptor dCaller = (Descriptor) itr.next();
2420 // only consider callers in the original set to analyze
2421 if( !toSort.contains(dCaller) ) {
2425 if( !discovered.contains(dCaller) ) {
2426 addDependent(md, // callee
2430 dfsVisit(dCaller, toSort, sorted, discovered);
2435 // for leaf-nodes last now!
// Add d to the interprocedural work set unless it is already queued.
// The backing structure is selected by compiler-state flags: a stack
// (two stack disciplines) or a priority queue keyed by the priority
// computed during topological sorting.
2440 protected void enqueue(Descriptor d) {
2442 if( !descriptorsToVisitSet.contains(d) ) {
2444 if( state.DISJOINTDVISITSTACK ||
2445 state.DISJOINTDVISITSTACKEESONTOP
2447 descriptorsToVisitStack.add(d);
2449 } else if( state.DISJOINTDVISITPQUE ) {
2450 Integer priority = mapDescriptorToPriority.get(d);
2451 descriptorsToVisitQ.add(new DescriptorQWrapper(priority,
2456 descriptorsToVisitSet.add(d);
2461 // a dependent of a method descriptor d for this analysis is:
2462 // 1) a method or task that invokes d
2463 // 2) in the descriptorsToAnalyze set
// Register 'caller' as a dependent of 'callee', lazily creating the
// callee's dependent set on first use.
2464 protected void addDependent(Descriptor callee, Descriptor caller) {
2465 Set<Descriptor> deps = mapDescriptorToSetDependents.get(callee);
2466 if( deps == null ) {
2467 deps = new HashSet<Descriptor>();
2470 mapDescriptorToSetDependents.put(callee, deps);
// Fetch the dependents recorded for 'callee', installing an empty
// set on first access so callers never observe null.
2473 protected Set<Descriptor> getDependents(Descriptor callee) {
2474 Set<Descriptor> deps = mapDescriptorToSetDependents.get(callee);
2475 if( deps == null ) {
2476 deps = new HashSet<Descriptor>();
2477 mapDescriptorToSetDependents.put(callee, deps);
// Initial-heap-model (IHM) contributions for descriptor d: the
// per-call-site reach graphs passed down from d's callers.  A fresh
// empty table is lazily installed so the result is never null.
2483 public Hashtable<FlatCall, ReachGraph> getIHMcontributions(Descriptor d) {
2485 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2486 mapDescriptorToIHMcontributions.get(d);
2488 if( heapsFromCallers == null ) {
2489 heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
2490 mapDescriptorToIHMcontributions.put(d, heapsFromCallers);
2493 return heapsFromCallers;
// Look up the single IHM contribution for call site fc within
// descriptor d's contribution table.
2496 public ReachGraph getIHMcontribution(Descriptor d,
2499 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2500 getIHMcontributions(d);
2502 if( !heapsFromCallers.containsKey(fc) ) {
2506 return heapsFromCallers.get(fc);
// Merge a caller's reach graph into the IHM contribution stored for
// call site fc of descriptor d.  The merge into a fresh graph keeps
// the stored input contexts growing monotonically, which the
// fixed-point iteration relies on.
2510 public void addIHMcontribution(Descriptor d,
2514 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2515 getIHMcontributions(d);
2517 // ensure inputs to initial contexts increase monotonically
2518 ReachGraph merged = new ReachGraph();
2520 merged.merge( heapsFromCallers.get( fc ) );
2522 heapsFromCallers.put( fc, merged );
// Fabricate a FlatNew (and from it an AllocSite) to model the heap
// object a method/task parameter refers to.  One branch names the
// site "param"+tempDesc; the other passes null as the disjoint site
// ID -- presumably selected by a boolean parameter not visible here.
// The site's allocationDepth heap region IDs plus a summary-node ID
// are generated and registered in mapHrnIdToAllocSite.
2527 private AllocSite createParameterAllocSite(ReachGraph rg,
2528 TempDescriptor tempDesc,
2534 flatNew = new FlatNew(tempDesc.getType(), // type
2535 tempDesc, // param temp
2536 false, // global alloc?
2537 "param"+tempDesc // disjoint site ID string
2540 flatNew = new FlatNew(tempDesc.getType(), // type
2541 tempDesc, // param temp
2542 false, // global alloc?
2543 null // disjoint site ID string
2547 // create allocation site
2548 AllocSite as = AllocSite.factory(allocationDepth,
2550 flatNew.getDisjointId(),
2553 for (int i = 0; i < allocationDepth; ++i) {
2554 Integer id = generateUniqueHeapRegionNodeID();
2555 as.setIthOldest(i, id);
2556 mapHrnIdToAllocSite.put(id, as);
2558 // the oldest node is a summary node
2559 as.setSummary(generateUniqueHeapRegionNodeID() );
// Collect the fields of typeDesc's class that this analysis should
// track (per shouldAnalysisTrack).  Immutable types contribute no
// fields at all.
2567 private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc) {
2569 Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
2570 if(!typeDesc.isImmutable()) {
2571 ClassDescriptor classDesc = typeDesc.getClassDesc();
2572 for (Iterator it = classDesc.getFields(); it.hasNext(); ) {
2573 FieldDescriptor field = (FieldDescriptor) it.next();
2574 TypeDescriptor fieldType = field.getType();
2575 if (shouldAnalysisTrack(fieldType)) {
2576 fieldSet.add(field);
// Model a multi-dimensional array field as a chain of summary heap
// region nodes: one node per dimension (outermost first), linked by
// arrayElementFieldName edges, plus a final element-object node when
// the element class has trackable fields.  Existing nodes are reused
// via mapToExistingNode; 'map' records entry-node -> innermost-node
// so callers can find the element node for an array entry.  Returns
// the node for the outermost dimension.
2584 private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha) {
2586 int dimCount=fd.getType().getArrayCount();
2587 HeapRegionNode prevNode=null;
2588 HeapRegionNode arrayEntryNode=null;
2589 for(int i=dimCount; i>0; i--) {
2590 TypeDescriptor typeDesc=fd.getType().dereference(); //hack to get instance of type desc
2591 typeDesc.setArrayCount(i);
2592 TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
2593 HeapRegionNode hrnSummary;
2594 if(!mapToExistingNode.containsKey(typeDesc)) {
2599 as = createParameterAllocSite(rg, tempDesc, false);
2601 // make a new reference to allocated node
2603 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2604 false, // single object?
2606 false, // out-of-context?
2607 as.getType(), // type
2608 as, // allocation site
2609 alpha, // inherent reach
2610 alpha, // current reach
2611 ExistPredSet.factory(rg.predTrue), // predicates
2612 tempDesc.toString() // description
2614 rg.id2hrn.put(as.getSummary(),hrnSummary);
2616 mapToExistingNode.put(typeDesc, hrnSummary);
2618 hrnSummary=mapToExistingNode.get(typeDesc);
2621 if(prevNode==null) {
2622 // make a new reference between new summary node and source
2623 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2626 fd.getSymbol(), // field name
2628 ExistPredSet.factory(rg.predTrue), // predicates
2632 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2633 prevNode=hrnSummary;
2634 arrayEntryNode=hrnSummary;
2636 // make a new reference between summary nodes of array
2637 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2640 arrayElementFieldName, // field name
2642 ExistPredSet.factory(rg.predTrue), // predicates
2646 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2647 prevNode=hrnSummary;
2652 // create a new obj node if obj has at least one non-primitive field
2653 TypeDescriptor type=fd.getType();
2654 if(getFieldSetTobeAnalyzed(type).size()>0) {
2655 TypeDescriptor typeDesc=type.dereference();
2656 typeDesc.setArrayCount(0);
2657 if(!mapToExistingNode.containsKey(typeDesc)) {
2658 TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
2659 AllocSite as = createParameterAllocSite(rg, tempDesc, false);
2660 // make a new reference to allocated node
2661 HeapRegionNode hrnSummary =
2662 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2663 false, // single object?
2665 false, // out-of-context?
2667 as, // allocation site
2668 alpha, // inherent reach
2669 alpha, // current reach
2670 ExistPredSet.factory(rg.predTrue), // predicates
2671 tempDesc.toString() // description
2673 rg.id2hrn.put(as.getSummary(),hrnSummary);
2674 mapToExistingNode.put(typeDesc, hrnSummary);
2675 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2678 arrayElementFieldName, // field name
2680 ExistPredSet.factory(rg.predTrue), // predicates
2683 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2684 prevNode=hrnSummary;
2686 HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
2687 if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null) {
2688 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2691 arrayElementFieldName, // field name
2693 ExistPredSet.factory(rg.predTrue), // predicates
2696 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2698 prevNode=hrnSummary;
2702 map.put(arrayEntryNode, prevNode);
2703 return arrayEntryNode;
// Build the initial reach graph for a task's entry point: for each
// task parameter, fabricate a parameter allocation site and variable
// edge, then breadth-first expand the trackable fields of the
// parameter's class, creating one summary heap region node per
// distinct field type (multi-dimensional arrays are delegated to
// createMultiDeimensionalArrayHRN).  The doneSet of "srcID_field"
// strings prevents reprocessing the same node/field pair.
2706 private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
2707 ReachGraph rg = new ReachGraph();
2708 TaskDescriptor taskDesc = fm.getTask();
2710 for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
2711 Descriptor paramDesc = taskDesc.getParameter(idx);
2712 TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);
2714 // setup data structure
2715 Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
2716 new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
2717 Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
2718 new Hashtable<TypeDescriptor, HeapRegionNode>();
2719 Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
2720 new Hashtable<HeapRegionNode, HeapRegionNode>();
2721 Set<String> doneSet = new HashSet<String>();
2723 TempDescriptor tempDesc = fm.getParameter(idx);
2725 AllocSite as = createParameterAllocSite(rg, tempDesc, true);
2726 VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
2727 Integer idNewest = as.getIthOldest(0);
2728 HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);
2730 // make a new reference to allocated node
2731 RefEdge edgeNew = new RefEdge(lnX, // source
2733 taskDesc.getParamType(idx), // type
2735 hrnNewest.getAlpha(), // beta
2736 ExistPredSet.factory(rg.predTrue), // predicates
2739 rg.addRefEdge(lnX, hrnNewest, edgeNew);
2741 // set-up a work set for class field
2742 ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
2743 for (Iterator it = classDesc.getFields(); it.hasNext(); ) {
2744 FieldDescriptor fd = (FieldDescriptor) it.next();
2745 TypeDescriptor fieldType = fd.getType();
2746 if (shouldAnalysisTrack(fieldType)) {
2747 HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
2748 newMap.put(hrnNewest, fd);
2749 workSet.add(newMap);
2753 int uniqueIdentifier = 0;
2754 while (!workSet.isEmpty()) {
2755 HashMap<HeapRegionNode, FieldDescriptor> map = workSet
2757 workSet.remove(map);
2759 Set<HeapRegionNode> key = map.keySet();
2760 HeapRegionNode srcHRN = key.iterator().next();
2761 FieldDescriptor fd = map.get(srcHRN);
2762 TypeDescriptor type = fd.getType();
2763 String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;
2765 if (!doneSet.contains(doneSetIdentifier)) {
2766 doneSet.add(doneSetIdentifier);
2767 if (!mapTypeToExistingSummaryNode.containsKey(type)) {
2768 // create new summary Node
2769 TempDescriptor td = new TempDescriptor("temp"
2770 + uniqueIdentifier, type);
2772 AllocSite allocSite;
2773 if(type.equals(paramTypeDesc)) {
2774 //corresponding allocsite has already been created for a parameter variable.
2777 allocSite = createParameterAllocSite(rg, td, false);
2779 String strDesc = allocSite.toStringForDOT()
2781 TypeDescriptor allocType=allocSite.getType();
2783 HeapRegionNode hrnSummary;
2784 if(allocType.isArray() && allocType.getArrayCount()>0) {
2785 hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
2788 rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
2789 false, // single object?
2791 false, // out-of-context?
2792 allocSite.getType(), // type
2793 allocSite, // allocation site
2794 hrnNewest.getAlpha(), // inherent reach
2795 hrnNewest.getAlpha(), // current reach
2796 ExistPredSet.factory(rg.predTrue), // predicates
2797 strDesc // description
2799 rg.id2hrn.put(allocSite.getSummary(),hrnSummary);
2801 // make a new reference to summary node
2802 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2805 fd.getSymbol(), // field name
2806 hrnNewest.getAlpha(), // beta
2807 ExistPredSet.factory(rg.predTrue), // predicates
2811 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2815 mapTypeToExistingSummaryNode.put(type, hrnSummary);
2817 // set-up a work set for fields of the class
2818 Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
2819 for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
2821 FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator
2823 HeapRegionNode newDstHRN;
2824 if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)) {
2825 // related heap region node already exists.
2826 newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
2828 newDstHRN=hrnSummary;
2830 doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
2831 if(!doneSet.contains(doneSetIdentifier)) {
2832 // add new work item
2833 HashMap<HeapRegionNode, FieldDescriptor> newMap =
2834 new HashMap<HeapRegionNode, FieldDescriptor>();
2835 newMap.put(newDstHRN, fieldDescriptor);
2836 workSet.add(newMap);
2841 // if there exists corresponding summary node
2842 HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);
2844 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2846 fd.getType(), // type
2847 fd.getSymbol(), // field name
2848 srcHRN.getAlpha(), // beta
2849 ExistPredSet.factory(rg.predTrue), // predicates
2852 rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
2862 // return all allocation sites in the method (there is one allocation
2863 // site per FlatNew node in a method)
// Cached accessor: builds the set on first request for descriptor d.
2864 private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
2865 if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
2866 buildAllocationSiteSet(d);
2869 return mapDescriptorToAllocSiteSet.get(d);
// Walk the flat IR graph of method/task d (via the 'pm' node
// provider) and collect an AllocSite for every FlatNew node found,
// caching the result in mapDescriptorToAllocSiteSet.
2873 private void buildAllocationSiteSet(Descriptor d) {
2874 HashSet<AllocSite> s = new HashSet<AllocSite>();
2877 if( d instanceof MethodDescriptor ) {
2878 fm = state.getMethodFlat( (MethodDescriptor) d);
2880 assert d instanceof TaskDescriptor;
2881 fm = state.getMethodFlat( (TaskDescriptor) d);
2883 pm.analyzeMethod(fm);
2885 // visit every node in this FlatMethod's IR graph
2886 // and make a set of the allocation sites from the
2887 // FlatNew node's visited
2888 HashSet<FlatNode> visited = new HashSet<FlatNode>();
2889 HashSet<FlatNode> toVisit = new HashSet<FlatNode>();
2892 while( !toVisit.isEmpty() ) {
2893 FlatNode n = toVisit.iterator().next();
2895 if( n instanceof FlatNew ) {
2896 s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );
2902 for( int i = 0; i < pm.numNext(n); ++i ) {
2903 FlatNode child = pm.getNext(n, i);
2904 if( !visited.contains(child) ) {
2910 mapDescriptorToAllocSiteSet.put(d, s);
// Starting from dIn, traverse the call graph (callee direction) and
// gather every allocation site carrying a disjoint-analysis ID.
2913 private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {
2915 HashSet<AllocSite> out = new HashSet<AllocSite>();
2916 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2917 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2921 while (!toVisit.isEmpty()) {
2922 Descriptor d = toVisit.iterator().next();
2926 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2927 Iterator asItr = asSet.iterator();
2928 while (asItr.hasNext()) {
2929 AllocSite as = (AllocSite) asItr.next();
2930 if (as.getDisjointAnalysisId() != null) {
2935 // enqueue callees of this method to be searched for
2936 // allocation sites also
2937 Set callees = callGraph.getCalleeSet(d);
2938 if (callees != null) {
2939 Iterator methItr = callees.iterator();
2940 while (methItr.hasNext()) {
2941 MethodDescriptor md = (MethodDescriptor) methItr.next();
2943 if (!visited.contains(md)) {
// Worker behind getFlaggedAllocationSitesReachableFromTask: visit
// the task and every method reachable from it through the call
// graph, collecting allocation sites whose class carries flags.
2954 private HashSet<AllocSite>
2955 getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {
2957 HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
2958 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2959 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2963 // traverse this task and all methods reachable from this task
2964 while( !toVisit.isEmpty() ) {
2965 Descriptor d = toVisit.iterator().next();
2969 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2970 Iterator asItr = asSet.iterator();
2971 while( asItr.hasNext() ) {
2972 AllocSite as = (AllocSite) asItr.next();
2973 TypeDescriptor typed = as.getType();
2974 if( typed != null ) {
2975 ClassDescriptor cd = typed.getClassDesc();
2976 if( cd != null && cd.hasFlags() ) {
2982 // enqueue callees of this method to be searched for
2983 // allocation sites also
2984 Set callees = callGraph.getCalleeSet(d);
2985 if( callees != null ) {
2986 Iterator methItr = callees.iterator();
2987 while( methItr.hasNext() ) {
2988 MethodDescriptor md = (MethodDescriptor) methItr.next();
2990 if( !visited.contains(md) ) {
// Accessor for the full set of method/task descriptors this
// analysis covers.
3000 public Set<Descriptor> getDescriptorsToAnalyze() {
3001 return descriptorsToAnalyze;
// Accessor for the companion effects analysis.
3004 public EffectsAnalysis getEffectsAnalysis() {
3005 return effectsAnalysis;
// Completed (fixed-point) reach graph for descriptor d, or null if
// none has been recorded.
3008 public ReachGraph getReachGraph(Descriptor d) {
3009 return mapDescriptorToCompleteReachGraph.get(d);
// Reach graph captured at entry to flat node fn, or null if absent.
3012 public ReachGraph getEnterReachGraph(FlatNode fn) {
3013 return fn2rgAtEnter.get(fn);
// Mutable bundle of flags threaded through call-site debugging:
// whether this call site is the one being debugged, whether any
// debug output happened, whether to emit DOT graphs, and whether to
// halt the compiler after the capture window.
3018 protected class DebugCallSiteData {
3019 public boolean debugCallSite;
3020 public boolean didOneDebug;
3021 public boolean writeDebugDOTs;
3022 public boolean stopAfter;
3024 public DebugCallSiteData() {
3025 debugCallSite = false;
3026 didOneDebug = false;
3027 writeDebugDOTs = false;
// Decide whether the caller/callee pair matches the debug call site
// requested via the compiler flags DISJOINTDEBUGCALLER/CALLEE, and
// whether the current visit falls inside the capture window for
// writing DOT graphs.  Results are stored into dcsd.
3032 protected void decideDebugCallSite( DebugCallSiteData dcsd,
3033 Descriptor taskOrMethodCaller,
3034 MethodDescriptor mdCallee ) {
3036 // all this jimma jamma to debug call sites is WELL WORTH the
3037 // effort, so so so many bugs or buggy info appears through call
3040 if( state.DISJOINTDEBUGCALLEE == null ||
3041 state.DISJOINTDEBUGCALLER == null ) {
3046 boolean debugCalleeMatches = false;
3047 boolean debugCallerMatches = false;
3049 ClassDescriptor cdCallee = mdCallee.getClassDesc();
3050 if( cdCallee != null ) {
3051 debugCalleeMatches =
3052 state.DISJOINTDEBUGCALLEE.equals( cdCallee.getSymbol()+
3054 mdCallee.getSymbol()
3059 if( taskOrMethodCaller instanceof MethodDescriptor ) {
3060 ClassDescriptor cdCaller = ((MethodDescriptor)taskOrMethodCaller).getClassDesc();
3061 if( cdCaller != null ) {
3062 debugCallerMatches =
3063 state.DISJOINTDEBUGCALLER.equals( cdCaller.getSymbol()+
3065 taskOrMethodCaller.getSymbol()
3069 // for bristlecone style tasks
3070 debugCallerMatches =
3071 state.DISJOINTDEBUGCALLER.equals( taskOrMethodCaller.getSymbol() );
3075 dcsd.debugCallSite = debugCalleeMatches && debugCallerMatches;
3078 dcsd.writeDebugDOTs =
3080 dcsd.debugCallSite &&
3082 (ReachGraph.debugCallSiteVisitCounter >=
3083 ReachGraph.debugCallSiteVisitStartCapture) &&
3085 (ReachGraph.debugCallSiteVisitCounter <
3086 ReachGraph.debugCallSiteVisitStartCapture +
3087 ReachGraph.debugCallSiteNumVisitsToCapture);
3091 if( dcsd.debugCallSite ) {
3092 dcsd.didOneDebug = true;
// After a call-site visit, report debug status: prints the visit
// counter, enables DOT capture while the counter is inside the
// capture window, optionally requests a compiler stop on the final
// captured visit, and advances the visit counter.
3096 protected void statusDebugCallSite( DebugCallSiteData dcsd ) {
3098 dcsd.writeDebugDOTs = false;
3099 dcsd.stopAfter = false;
3101 if( dcsd.didOneDebug ) {
3102 System.out.println(" $$$ Debug call site visit "+
3103 ReachGraph.debugCallSiteVisitCounter+
3107 (ReachGraph.debugCallSiteVisitCounter >=
3108 ReachGraph.debugCallSiteVisitStartCapture) &&
3110 (ReachGraph.debugCallSiteVisitCounter <
3111 ReachGraph.debugCallSiteVisitStartCapture +
3112 ReachGraph.debugCallSiteNumVisitsToCapture)
3114 dcsd.writeDebugDOTs = true;
3115 System.out.println(" $$$ Capturing this call site visit $$$");
3116 if( ReachGraph.debugCallSiteStopAfter &&
3117 (ReachGraph.debugCallSiteVisitCounter ==
3118 ReachGraph.debugCallSiteVisitStartCapture +
3119 ReachGraph.debugCallSiteNumVisitsToCapture - 1)
3121 dcsd.stopAfter = true;
3125 ++ReachGraph.debugCallSiteVisitCounter;
3128 if( dcsd.stopAfter ) {
3129 System.out.println("$$$ Exiting after requested captures of call site. $$$");
3138 // get successive captures of the analysis state, use compiler
// State for the debugSnapshot facility below: which descriptor to
// snapshot, the visit/node counters, and the capture window bounds.
3140 boolean takeDebugSnapshots = false;
3141 String descSymbolDebug = null;
3142 boolean stopAfterCapture = false;
3143 int snapVisitCounter = 0;
3144 int snapNodeCounter = 0;
3145 int visitStartCapture = 0;
3146 int numVisitsToCapture = 0;
// Write a DOT snapshot of reach graph rg at flat node fn while the
// visit counter is inside the capture window; 'in' selects the
// "...in"/"...out" file-name suffix (before vs. after the node).
3149 void debugSnapshot(ReachGraph rg, FlatNode fn, boolean in) {
3150 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
3158 if( snapVisitCounter >= visitStartCapture ) {
3159 System.out.println(" @@@ snapping visit="+snapVisitCounter+
3160 ", node="+snapNodeCounter+
3164 graphName = String.format("snap%03d_%04din",
3168 graphName = String.format("snap%03d_%04dout",
3173 graphName = graphName + fn;
3175 rg.writeGraph(graphName,
3176 true, // write labels (variables)
3177 true, // selectively hide intermediate temp vars
3178 true, // prune unreachable heap regions
3179 false, // hide reachability
3180 true, // hide subset reachability states
3181 true, // hide predicates
3182 true); // hide edge taints
// Allocation sites variable x may point to at entry to the given
// program point; null-guarded when no enter-graph exists for it.
3189 public Set<Alloc> canPointToAt( TempDescriptor x,
3190 FlatNode programPoint ) {
3192 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
3193 if( rgAtEnter == null ) {
3197 return rgAtEnter.canPointTo( x );
// Field-sensitive variant: for each site x may reference at entry to
// the program point, the sites reachable through field f.
3201 public Hashtable< Alloc, Set<Alloc> > canPointToAt( TempDescriptor x,
3203 FlatNode programPoint ) {
3205 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
3206 if( rgAtEnter == null ) {
3210 return rgAtEnter.canPointTo( x, f.getSymbol(), f.getType() );
// Array-element variant of canPointToAt: x must be an array; the
// element "field" is modeled by arrayElementFieldName with the
// dereferenced element type.
3214 public Hashtable< Alloc, Set<Alloc> > canPointToAtElement( TempDescriptor x,
3215 FlatNode programPoint ) {
3217 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
3218 if( rgAtEnter == null ) {
3222 assert x.getType() != null;
3223 assert x.getType().isArray();
3225 return rgAtEnter.canPointTo( x, arrayElementFieldName, x.getType().dereference() );
// As canPointToAt, but using the reach graph at EXIT of the program
// point (after its effects).
3229 public Set<Alloc> canPointToAfter( TempDescriptor x,
3230 FlatNode programPoint ) {
3232 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
3234 if( rgAtExit == null ) {
3238 return rgAtExit.canPointTo( x );
// Field-sensitive variant of canPointToAfter (exit-graph based).
3242 public Hashtable< Alloc, Set<Alloc> > canPointToAfter( TempDescriptor x,
3244 FlatNode programPoint ) {
3246 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
3247 if( rgAtExit == null ) {
3251 return rgAtExit.canPointTo( x, f.getSymbol(), f.getType() );
// Array-element variant of canPointToAfter (exit-graph based);
// x must be an array type.
3255 public Hashtable< Alloc, Set<Alloc> > canPointToAfterElement( TempDescriptor x,
3256 FlatNode programPoint ) {
3258 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
3259 if( rgAtExit == null ) {
3263 assert x.getType() != null;
3264 assert x.getType().isArray();
3266 return rgAtExit.canPointTo( x, arrayElementFieldName, x.getType().dereference() );
3270 // to evaluate convergence behavior
// Global tallies of method and IR-node visits across the whole run.
3271 private static long totalMethodVisits = 0;
3272 private static long totalNodeVisits = 0;